lang (stringclasses, 2 values) | license (stringclasses, 13 values) | stderr (stringlengths 0-343) | commit (stringlengths 40-40) | returncode (int64, 0-128) | repos (stringlengths 6-87.7k) | new_contents (stringlengths 0-6.23M) | new_file (stringlengths 3-311) | old_contents (stringlengths 0-6.23M) | message (stringlengths 6-9.1k) | old_file (stringlengths 3-311) | subject (stringlengths 0-4k) | git_diff (stringlengths 0-6.31M) |
---|---|---|---|---|---|---|---|---|---|---|---|---|
Java | apache-2.0 | f56cfc6233285b823204d2e05d548afc6853f7d0 | 0 | RWTH-i5-IDSG/jamocha,RWTH-i5-IDSG/jamocha | /*
* Copyright 2002-2014 The Jamocha Team
*
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.jamocha.org/
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.jamocha.languages.clips.parser;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.stream.IntStream;
import java.util.stream.Stream;
import org.jamocha.dn.memory.SlotType;
import org.jamocha.dn.memory.Template;
import org.jamocha.dn.memory.Template.Slot;
import org.jamocha.filter.Function;
import org.jamocha.filter.FunctionDictionary;
import org.jamocha.filter.Predicate;
import org.jamocha.filter.fwa.PredicateWithArguments;
import org.jamocha.filter.fwa.PredicateWithArgumentsComposite;
import org.jamocha.languages.clips.parser.generated.Node;
import org.jamocha.languages.clips.parser.generated.SFPAndFunction;
import org.jamocha.languages.clips.parser.generated.SFPBooleanType;
import org.jamocha.languages.clips.parser.generated.SFPConstructDescription;
import org.jamocha.languages.clips.parser.generated.SFPDateTimeType;
import org.jamocha.languages.clips.parser.generated.SFPDefruleConstruct;
import org.jamocha.languages.clips.parser.generated.SFPDeftemplateConstruct;
import org.jamocha.languages.clips.parser.generated.SFPExpression;
import org.jamocha.languages.clips.parser.generated.SFPFalse;
import org.jamocha.languages.clips.parser.generated.SFPFloat;
import org.jamocha.languages.clips.parser.generated.SFPFloatType;
import org.jamocha.languages.clips.parser.generated.SFPInteger;
import org.jamocha.languages.clips.parser.generated.SFPIntegerType;
import org.jamocha.languages.clips.parser.generated.SFPNotFunction;
import org.jamocha.languages.clips.parser.generated.SFPOrFunction;
import org.jamocha.languages.clips.parser.generated.SFPParser;
import org.jamocha.languages.clips.parser.generated.SFPSingleSlotDefinition;
import org.jamocha.languages.clips.parser.generated.SFPSlotDefinition;
import org.jamocha.languages.clips.parser.generated.SFPStart;
import org.jamocha.languages.clips.parser.generated.SFPString;
import org.jamocha.languages.clips.parser.generated.SFPStringType;
import org.jamocha.languages.clips.parser.generated.SFPSymbol;
import org.jamocha.languages.clips.parser.generated.SFPSymbolType;
import org.jamocha.languages.clips.parser.generated.SFPTemplatePatternCE;
import org.jamocha.languages.clips.parser.generated.SFPTrue;
import org.jamocha.languages.clips.parser.generated.SFPTypeAttribute;
import org.jamocha.languages.clips.parser.generated.SFPTypeSpecification;
import org.jamocha.languages.common.ScopeStack;
import org.jamocha.languages.common.ScopeStack.Symbol;
public class SFPVisitorImpl implements SelectiveSFPVisitor {
final ScopeStack scope = new ScopeStack();
final HashMap<Symbol, Template> symbolTableTemplates = new HashMap<>();
final HashMap<Symbol, Function<?>> symbolTableFunctions = new HashMap<>();
@Override
public Object visit(SFPStart node, Object data) {
assert node.jjtGetNumChildren() == 1;
sendVisitor(new SFPStartVisitor(), node.jjtGetChild(0), data);
return data;
}
public <V extends SelectiveSFPVisitor, N extends Node> V sendVisitor(final V visitor,
final N node, final Object data) {
node.jjtAccept(visitor, data);
return visitor;
}
public Stream<Node> stream(final Node node, final int startIndex) {
return IntStream.range(startIndex, node.jjtGetNumChildren()).mapToObj(
i -> node.jjtGetChild(i));
}
final static EnumSet<SlotType> Number = EnumSet.of(SlotType.LONG, SlotType.DOUBLE);
final static EnumSet<SlotType> Constant = EnumSet.of(SlotType.NIL, SlotType.DATETIME,
SlotType.SYMBOL, SlotType.STRING, SlotType.LONG, SlotType.DOUBLE, SlotType.BOOLEAN);
class SFPSymbolVisitor implements SelectiveSFPVisitor {
Symbol symbol;
@Override
public Object visit(SFPSymbol node, Object data) {
this.symbol = scope.getOrCreate(node.jjtGetValue().toString());
return data;
};
}
class SFPStringVisitor implements SelectiveSFPVisitor {
String string;
@Override
public Object visit(SFPSymbol node, Object data) {
this.string = node.jjtGetValue().toString();
return data;
};
}
class SFPValueVisitor implements SelectiveSFPVisitor {
SlotType type;
Object value;
final EnumSet<SlotType> allowed;
public SFPValueVisitor(final SlotType firstAllowed, final SlotType... restAllowed) {
this.allowed = EnumSet.of(firstAllowed, restAllowed);
}
public SFPValueVisitor(final SlotType allowed) {
this.allowed = EnumSet.of(allowed);
}
public SFPValueVisitor(final EnumSet<SlotType> allowed) {
this.allowed = allowed;
}
public SFPValueVisitor() {
this.allowed = EnumSet.allOf(SlotType.class);
}
@Override
public Object visit(SFPFloat node, Object data) {
if (!allowed.contains(SlotType.DOUBLE))
return SFPVisitorImpl.this.visit(node, data);
this.type = SlotType.DOUBLE;
this.value = Double.parseDouble(node.jjtGetValue().toString());
return data;
}
@Override
public Object visit(SFPInteger node, Object data) {
if (!allowed.contains(SlotType.LONG))
return SFPVisitorImpl.this.visit(node, data);
this.type = SlotType.LONG;
this.value = Long.parseLong(node.jjtGetValue().toString());
return data;
}
@Override
public Object visit(SFPSymbol node, Object data) {
if (!allowed.contains(SlotType.SYMBOL))
return SFPVisitorImpl.this.visit(node, data);
this.type = SlotType.SYMBOL;
this.value = sendVisitor(new SFPSymbolVisitor(), node, data).symbol;
return data;
}
@Override
public Object visit(SFPTrue node, Object data) {
if (!allowed.contains(SlotType.BOOLEAN))
return SFPVisitorImpl.this.visit(node, data);
this.type = SlotType.BOOLEAN;
this.value = true;
return data;
}
@Override
public Object visit(SFPFalse node, Object data) {
if (!allowed.contains(SlotType.BOOLEAN))
return SFPVisitorImpl.this.visit(node, data);
this.type = SlotType.BOOLEAN;
this.value = false;
return data;
}
@Override
public Object visit(SFPString node, Object data) {
if (!allowed.contains(SlotType.STRING))
return SFPVisitorImpl.this.visit(node, data);
this.type = SlotType.STRING;
this.value = sendVisitor(new SFPStringVisitor(), node, data).string;
return data;
}
// TBD Nil, DateTime
}
class SFPTypeVisitor implements SelectiveSFPVisitor {
SlotType type;
final EnumSet<SlotType> allowed;
public SFPTypeVisitor(final SlotType firstAllowed, final SlotType... restAllowed) {
this.allowed = EnumSet.of(firstAllowed, restAllowed);
}
public SFPTypeVisitor(final SlotType allowed) {
this.allowed = EnumSet.of(allowed);
}
public SFPTypeVisitor(final EnumSet<SlotType> allowed) {
this.allowed = allowed;
}
public SFPTypeVisitor() {
this.allowed = EnumSet.allOf(SlotType.class);
}
@Override
public Object visit(SFPFloatType node, Object data) {
if (!allowed.contains(SlotType.DOUBLE))
return SFPVisitorImpl.this.visit(node, data);
this.type = SlotType.DOUBLE;
return data;
}
@Override
public Object visit(SFPIntegerType node, Object data) {
if (!allowed.contains(SlotType.LONG))
return SFPVisitorImpl.this.visit(node, data);
this.type = SlotType.LONG;
return data;
}
@Override
public Object visit(SFPSymbolType node, Object data) {
if (!allowed.contains(SlotType.SYMBOL))
return SFPVisitorImpl.this.visit(node, data);
this.type = SlotType.SYMBOL;
return data;
}
@Override
public Object visit(SFPBooleanType node, Object data) {
if (!allowed.contains(SlotType.BOOLEAN))
return SFPVisitorImpl.this.visit(node, data);
this.type = SlotType.BOOLEAN;
return data;
}
@Override
public Object visit(SFPStringType node, Object data) {
if (!allowed.contains(SlotType.STRING))
return SFPVisitorImpl.this.visit(node, data);
this.type = SlotType.STRING;
return data;
}
@Override
public Object visit(SFPDateTimeType node, Object data) {
if (!allowed.contains(SlotType.DATETIME))
return SFPVisitorImpl.this.visit(node, data);
this.type = SlotType.DATETIME;
return data;
}
// TBD LEXEME = STRING | SYMBOL, NUMBER = INTEGER | FLOAT
}
class SFPTypeSpecificationVisitor implements SelectiveSFPVisitor {
SlotType type;
// <type-specification> ::= <allowed-type>+ | ?VARIABLE
// void: ( ( AllowedType() )+ | VariableType() )
// <allowed-type> ::= SYMBOL | STRING | LEXEME | INTEGER | FLOAT | NUMBER |
// EXTERNAL-ADDRESS | FACT-ADDRESS | DATETIME
// void: ( SymbolType() | StringType() | DateTimeType() | LexemeType() | IntegerType() |
// FloatType() | NumberType() | BooleanType() )
@Override
public Object visit(SFPTypeSpecification node, Object data) {
if (node.jjtGetNumChildren() != 1)
throw new IllegalArgumentException(
"Restriction of template fields to multiple types is not supported at the moment!");
assert node.jjtGetNumChildren() == 1;
// TBD LEXEME = STRING | SYMBOL, NUMBER = INTEGER | FLOAT
this.type =
sendVisitor(
new SFPTypeVisitor(EnumSet.of(/* SlotType.LEXEME, */SlotType.SYMBOL,
SlotType.STRING, SlotType.DATETIME, SlotType.LONG,
SlotType.DOUBLE/* , SlotType.NUMBER */)), node.jjtGetChild(0),
data).type;
return data;
}
// TBD VariableType
}
class SFPTemplateAttributeVisitor implements SelectiveSFPVisitor {
SlotType slotType;
// <template-attribute> ::= <default-attribute> | <constraint-attribute>
// <LBRACE> ( DefaultAttribute() | DynamicAttribute() | ConstraintAttribute() ) <RBRACE>
// <constraint-attribute> ::= <type-attribute> | <allowed-constant-attribute> |
// <range-attribute> | <cardinality-attribute>
// ( TypeAttribute() | AllowedConstantAttribute() | RangeAttribute() |
// CardinalityAttribute() )
// <type-attribute> ::= (type <type-specification>)
// <TYPE> TypeSpecification()
@Override
public Object visit(SFPTypeAttribute node, Object data) {
assert node.jjtGetNumChildren() == 1;
this.slotType =
sendVisitor(new SFPTypeSpecificationVisitor(), node.jjtGetChild(0), data).type;
return data;
}
}
class SFPSingleSlotDefinitionVisitor implements SelectiveSFPVisitor {
Slot slot;
// <single-slot-definition> ::= ( slot <slot-name> <template-attribute>*)
// <SLOT> ( Symbol() ( TemplateAttribute() )* )
@Override
public Object visit(SFPSingleSlotDefinition node, Object data) {
assert node.jjtGetNumChildren() > 0;
if (node.jjtGetNumChildren() != 2)
throw new IllegalArgumentException(
"For now, slot definitions consist of a name and a type restriction!");
final Symbol name =
sendVisitor(new SFPSymbolVisitor(), node.jjtGetChild(0), data).symbol;
final SlotType type =
sendVisitor(new SFPTemplateAttributeVisitor(), node.jjtGetChild(1), data).slotType;
this.slot = new Slot(type, name.getImage());
return data;
}
}
class SFPDeftemplateConstructElementsVisitor implements SelectiveSFPVisitor {
String comment;
final LinkedList<Slot> slotDefinitions = new LinkedList<>();
// <comment> ::= <string>
@Override
public Object visit(SFPConstructDescription node, Object data) {
assert node.jjtGetNumChildren() == 1;
this.comment = sendVisitor(new SFPStringVisitor(), node.jjtGetChild(0), data).string;
return data;
};
// <slot-definition> ::= <single-slot-definition> | <multislot-definition>
@Override
public Object visit(SFPSlotDefinition node, Object data) {
assert node.jjtGetNumChildren() == 1;
// TBD add support for multislot-definition
this.slotDefinitions.add(sendVisitor(new SFPSingleSlotDefinitionVisitor(),
node.jjtGetChild(0), data).slot);
return data;
};
}
class ConditionalElement {
}
class SFPConditionalElementVisitor implements SelectiveSFPVisitor {
PredicateWithArguments conditionalElement;
// <conditional-element> ::= <pattern-CE> | <assigned-pattern-CE> | <not-CE> | <and-CE> |
// <or-CE> | <logical-CE> | <test-CE> | <exists-CE> | <forall-CE>
// void ConditionalElement() ( ( <LBRACE> ( TemplatePatternCE()| BooleanFunction() |
// LogicalCE() | TestCE() | ExistsCE() | ForallCE() ) <RBRACE> ) | AssignedPatternCE() )
// <template-pattern-CE> ::= (<deftemplate-name> <LHS-slot>*)
// TemplatePatternCE(): ( Symbol() ( (UnorderedLHSFactBody())+ | OrderedLHSFactBody() ) )
@Override
public Object visit(SFPTemplatePatternCE node, Object data) {
// TBD TemplatePatternCE
final Symbol symbol =
sendVisitor(new SFPSymbolVisitor(), node.jjtGetChild(0), data).symbol;
final Template template = SFPVisitorImpl.this.symbolTableTemplates.get(symbol);
final int slotIndex = template.getIndexByName("");
return data;
}
// <logical-CE> ::= (logical <conditional-element>+)
// LogicalCE() : <LOGICAL> ( ConditionalElement() )+
// TBD LogicalCE
@Override
public Object visit(SFPAndFunction node, Object data) {
assert node.jjtGetNumChildren() > 1;
final PredicateWithArguments[] conditionalElements =
stream(node, 0)
.map(n -> sendVisitor(new SFPConditionalElementVisitor(), n, data).conditionalElement)
.filter(c -> null != c).toArray(PredicateWithArguments[]::new);
final Predicate and =
FunctionDictionary.lookupPredicate(
org.jamocha.filter.impls.predicates.And.inClips,
SlotType.nCopies(SlotType.BOOLEAN, conditionalElements.length));
this.conditionalElement = new PredicateWithArgumentsComposite(and, conditionalElements);
return data;
}
@Override
public Object visit(SFPOrFunction node, Object data) {
assert node.jjtGetNumChildren() > 1;
final PredicateWithArguments[] conditionalElements =
stream(node, 0)
.map(n -> sendVisitor(new SFPConditionalElementVisitor(), n, data).conditionalElement)
.filter(c -> null != c).toArray(PredicateWithArguments[]::new);
final Predicate or =
FunctionDictionary.lookupPredicate(
org.jamocha.filter.impls.predicates.Or.inClips,
SlotType.nCopies(SlotType.BOOLEAN, conditionalElements.length));
this.conditionalElement = new PredicateWithArgumentsComposite(or, conditionalElements);
return data;
}
@Override
public Object visit(SFPNotFunction node, Object data) {
assert node.jjtGetNumChildren() == 1;
final PredicateWithArguments conditionalElement =
sendVisitor(new SFPConditionalElementVisitor(), node.jjtGetChild(0), data).conditionalElement;
final Predicate not =
FunctionDictionary.lookupPredicate(
org.jamocha.filter.impls.predicates.Or.inClips, SlotType.BOOLEAN);
this.conditionalElement = new PredicateWithArgumentsComposite(not, conditionalElement);
return data;
}
// <test-CE> ::= (test <function-call>)
// TestCE() : <TEST> FunctionCall()
// TBD TestCE
// <exists-CE> ::= (exists <conditional-element>+)
// ExistsCE() : <EXISTS> ( ConditionalElement() )+
// TBD ExistsCE
// <forall-CE> ::= (forall <conditional-element> <conditional-element>+)
// ForallCE() : <FORALL> ConditionalElement() ( LOOKAHEAD(2) ConditionalElement() )+
// TBD ForallCE
// <assigned-pattern-CE> ::= <single-field-variable> <- <pattern-CE>
// AssignedPatternCE(): ( SingleVariable() <ASSIGN> <LBRACE> TemplatePatternCE() <RBRACE> )
// TBD AssignedPatternCE
}
class SFPDefruleConstructElementVisitor extends SFPConditionalElementVisitor {
String comment;
// TBD ActionList, Declaration
// <defrule-construct> ::= (defrule <rule-name> [<comment>] [<declaration>]
// <conditional-element>* => <expression>*)
// <DEFRULE> Symbol() [ ConstructDescription() ] ( [ LOOKAHEAD(3) Declaration() ] (
// ConditionalElement() )* ) <ARROW> ActionList()
@Override
public Object visit(SFPConstructDescription node, Object data) {
assert node.jjtGetNumChildren() == 1;
this.comment = sendVisitor(new SFPStringVisitor(), node.jjtGetChild(0), data).string;
return data;
}
}
class SFPExpressionVisitor implements SelectiveSFPVisitor {
@Override
public Object visit(SFPExpression node, Object data) {
return data;
}
}
class SFPStartVisitor implements SelectiveSFPVisitor {
// Start() : Construct() | Expression()
// void Construct() : <LBRACE> ( DeftemplateConstruct() | DefglobalConstruct()
// | DefruleConstruct() | DeffunctionConstruct() | DefmoduleConstruct() ) <RBRACE>
// <comment> ::= <string>
@Override
public Object visit(SFPDeftemplateConstruct node, Object data) {
// <deftemplate-construct> ::= (deftemplate <deftemplate-name> [<comment>]
// <slot-definition>*)
// <DEFTEMPLATE> Symbol() [ ConstructDescription() ] ( SlotDefinition() )*
assert node.jjtGetNumChildren() > 0;
final Symbol symbol =
sendVisitor(new SFPSymbolVisitor(), node.jjtGetChild(0), data).symbol;
final SFPDeftemplateConstructElementsVisitor visitor =
new SFPDeftemplateConstructElementsVisitor();
for (int i = 1; i < node.jjtGetNumChildren(); ++i) {
node.jjtGetChild(i).jjtAccept(visitor, data);
}
final String comment = visitor.comment;
final Template template =
new Template(
comment,
visitor.slotDefinitions.toArray(new Slot[visitor.slotDefinitions.size()]));
SFPVisitorImpl.this.symbolTableTemplates.put(symbol, template);
return data;
};
@Override
public Object visit(SFPDefruleConstruct node, Object data) {
// <defrule-construct> ::= (defrule <rule-name> [<comment>] [<declaration>]
// <conditional-element>* => <expression>*)
// <DEFRULE> Symbol() [ ConstructDescription() ] ( [ LOOKAHEAD(3) Declaration() ] (
// ConditionalElement() )* ) <ARROW> ActionList()
assert node.jjtGetNumChildren() > 1;
final Symbol symbol =
sendVisitor(new SFPSymbolVisitor(), node.jjtGetChild(0), data).symbol;
final SFPDefruleConstructElementVisitor visitor =
new SFPDefruleConstructElementVisitor();
for (int i = 1; i < node.jjtGetNumChildren(); ++i) {
node.jjtGetChild(i).jjtAccept(visitor, data);
}
final String comment = visitor.comment;
// final Rule rule = new Rule(visitor.slotDefinitions.toArray(new
// Slot[visitor.slotDefinitions.size()]));
// SFPVisitorImpl.this.symbolTableRules.put(symbol, this.template);
return data;
};
@Override
public Object visit(SFPExpression node, Object data) {
sendVisitor(new SFPExpressionVisitor(), node, data);
return data;
};
}
public static void main(String[] args) {
boolean verbose = (args != null && args.length == 1 && "verbose".equals(args[0]));
if (!verbose)
System.out
.println("Note: For verbose output type \u005c"java Main verbose\u005c".\u005cn");
System.out.print("SFP> ");
SFPParser p = new SFPParser(System.in);
try {
while (true) {
SFPStart n = p.Start();
if (n == null)
System.exit(0);
n.dump(" ");
Object a = n.jjtAccept(new SFPVisitorImpl(), "");
System.out.println(a);
}
} catch (Exception e) {
System.err.println("ERROR[" + e.getClass().getSimpleName() + "]: " + e.getMessage());
// if (verbose)
e.printStackTrace();
}
}
}
| src/org/jamocha/languages/clips/parser/SFPVisitorImpl.java | /*
* Copyright 2002-2014 The Jamocha Team
*
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.jamocha.org/
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.jamocha.languages.clips.parser;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.function.IntFunction;
import java.util.stream.IntStream;
import org.jamocha.dn.memory.SlotType;
import org.jamocha.dn.memory.Template;
import org.jamocha.dn.memory.Template.Slot;
import org.jamocha.filter.Function;
import org.jamocha.filter.FunctionDictionary;
import org.jamocha.filter.Predicate;
import org.jamocha.filter.fwa.PredicateWithArguments;
import org.jamocha.filter.fwa.PredicateWithArgumentsComposite;
import org.jamocha.languages.clips.parser.SFPVisitorImpl.SFPConditionalElementVisitor;
import org.jamocha.languages.clips.parser.generated.Node;
import org.jamocha.languages.clips.parser.generated.SFPAndFunction;
import org.jamocha.languages.clips.parser.generated.SFPBooleanType;
import org.jamocha.languages.clips.parser.generated.SFPConstructDescription;
import org.jamocha.languages.clips.parser.generated.SFPDateTimeType;
import org.jamocha.languages.clips.parser.generated.SFPDefruleConstruct;
import org.jamocha.languages.clips.parser.generated.SFPDeftemplateConstruct;
import org.jamocha.languages.clips.parser.generated.SFPExpression;
import org.jamocha.languages.clips.parser.generated.SFPFalse;
import org.jamocha.languages.clips.parser.generated.SFPFloat;
import org.jamocha.languages.clips.parser.generated.SFPFloatType;
import org.jamocha.languages.clips.parser.generated.SFPInteger;
import org.jamocha.languages.clips.parser.generated.SFPIntegerType;
import org.jamocha.languages.clips.parser.generated.SFPNotFunction;
import org.jamocha.languages.clips.parser.generated.SFPOrFunction;
import org.jamocha.languages.clips.parser.generated.SFPParser;
import org.jamocha.languages.clips.parser.generated.SFPSingleSlotDefinition;
import org.jamocha.languages.clips.parser.generated.SFPSlotDefinition;
import org.jamocha.languages.clips.parser.generated.SFPStart;
import org.jamocha.languages.clips.parser.generated.SFPString;
import org.jamocha.languages.clips.parser.generated.SFPStringType;
import org.jamocha.languages.clips.parser.generated.SFPSymbol;
import org.jamocha.languages.clips.parser.generated.SFPSymbolType;
import org.jamocha.languages.clips.parser.generated.SFPTemplatePatternCE;
import org.jamocha.languages.clips.parser.generated.SFPTrue;
import org.jamocha.languages.clips.parser.generated.SFPTypeAttribute;
import org.jamocha.languages.clips.parser.generated.SFPTypeSpecification;
import org.jamocha.languages.common.ScopeStack;
import org.jamocha.languages.common.ScopeStack.Symbol;
public class SFPVisitorImpl implements SelectiveSFPVisitor {
final ScopeStack scope = new ScopeStack();
final HashMap<Symbol, Template> symbolTableTemplates = new HashMap<>();
final HashMap<Symbol, Function<?>> symbolTableFunctions = new HashMap<>();
@Override
public Object visit(SFPStart node, Object data) {
assert node.jjtGetNumChildren() == 1;
sendVisitor(new SFPStartVisitor(), node.jjtGetChild(0), data);
return data;
}
public <V extends SelectiveSFPVisitor, N extends Node> V sendVisitor(final V visitor,
final N node, final Object data) {
node.jjtAccept(visitor, data);
return visitor;
}
public Stream<Node> stream(final Node node, final int startIndex) {
return IntStream.range(startIndex, node.jjtGetNumChildren()).mapToObj(
i -> node.jjtGetChild(i));
}
final static EnumSet<SlotType> Number = EnumSet.of(SlotType.LONG, SlotType.DOUBLE);
final static EnumSet<SlotType> Constant = EnumSet.of(SlotType.NIL, SlotType.DATETIME,
SlotType.SYMBOL, SlotType.STRING, SlotType.LONG, SlotType.DOUBLE, SlotType.BOOLEAN);
class SFPSymbolVisitor implements SelectiveSFPVisitor {
Symbol symbol;
@Override
public Object visit(SFPSymbol node, Object data) {
this.symbol = scope.getOrCreate(node.jjtGetValue().toString());
return data;
};
}
class SFPStringVisitor implements SelectiveSFPVisitor {
String string;
@Override
public Object visit(SFPSymbol node, Object data) {
this.string = node.jjtGetValue().toString();
return data;
};
}
class SFPValueVisitor implements SelectiveSFPVisitor {
SlotType type;
Object value;
final EnumSet<SlotType> allowed;
public SFPValueVisitor(final SlotType firstAllowed, final SlotType... restAllowed) {
this.allowed = EnumSet.of(firstAllowed, restAllowed);
}
public SFPValueVisitor(final SlotType allowed) {
this.allowed = EnumSet.of(allowed);
}
public SFPValueVisitor(final EnumSet<SlotType> allowed) {
this.allowed = allowed;
}
public SFPValueVisitor() {
this.allowed = EnumSet.allOf(SlotType.class);
}
@Override
public Object visit(SFPFloat node, Object data) {
if (!allowed.contains(SlotType.DOUBLE))
return SFPVisitorImpl.this.visit(node, data);
this.type = SlotType.DOUBLE;
this.value = Double.parseDouble(node.jjtGetValue().toString());
return data;
}
@Override
public Object visit(SFPInteger node, Object data) {
if (!allowed.contains(SlotType.LONG))
return SFPVisitorImpl.this.visit(node, data);
this.type = SlotType.LONG;
this.value = Long.parseLong(node.jjtGetValue().toString());
return data;
}
@Override
public Object visit(SFPSymbol node, Object data) {
if (!allowed.contains(SlotType.SYMBOL))
return SFPVisitorImpl.this.visit(node, data);
this.type = SlotType.SYMBOL;
this.value = sendVisitor(new SFPSymbolVisitor(), node, data).symbol;
return data;
}
@Override
public Object visit(SFPTrue node, Object data) {
if (!allowed.contains(SlotType.BOOLEAN))
return SFPVisitorImpl.this.visit(node, data);
this.type = SlotType.BOOLEAN;
this.value = true;
return data;
}
@Override
public Object visit(SFPFalse node, Object data) {
if (!allowed.contains(SlotType.BOOLEAN))
return SFPVisitorImpl.this.visit(node, data);
this.type = SlotType.BOOLEAN;
this.value = false;
return data;
}
@Override
public Object visit(SFPString node, Object data) {
if (!allowed.contains(SlotType.STRING))
return SFPVisitorImpl.this.visit(node, data);
this.type = SlotType.STRING;
this.value = sendVisitor(new SFPStringVisitor(), node, data).string;
return data;
}
// TBD Nil, DateTime
}
class SFPTypeVisitor implements SelectiveSFPVisitor {
SlotType type;
final EnumSet<SlotType> allowed;
public SFPTypeVisitor(final SlotType firstAllowed, final SlotType... restAllowed) {
this.allowed = EnumSet.of(firstAllowed, restAllowed);
}
public SFPTypeVisitor(final SlotType allowed) {
this.allowed = EnumSet.of(allowed);
}
public SFPTypeVisitor(final EnumSet<SlotType> allowed) {
this.allowed = allowed;
}
public SFPTypeVisitor() {
this.allowed = EnumSet.allOf(SlotType.class);
}
@Override
public Object visit(SFPFloatType node, Object data) {
if (!allowed.contains(SlotType.DOUBLE))
return SFPVisitorImpl.this.visit(node, data);
this.type = SlotType.DOUBLE;
return data;
}
@Override
public Object visit(SFPIntegerType node, Object data) {
if (!allowed.contains(SlotType.LONG))
return SFPVisitorImpl.this.visit(node, data);
this.type = SlotType.LONG;
return data;
}
@Override
public Object visit(SFPSymbolType node, Object data) {
if (!allowed.contains(SlotType.SYMBOL))
return SFPVisitorImpl.this.visit(node, data);
this.type = SlotType.SYMBOL;
return data;
}
@Override
public Object visit(SFPBooleanType node, Object data) {
if (!allowed.contains(SlotType.BOOLEAN))
return SFPVisitorImpl.this.visit(node, data);
this.type = SlotType.BOOLEAN;
return data;
}
@Override
public Object visit(SFPStringType node, Object data) {
if (!allowed.contains(SlotType.STRING))
return SFPVisitorImpl.this.visit(node, data);
this.type = SlotType.STRING;
return data;
}
@Override
public Object visit(SFPDateTimeType node, Object data) {
if (!allowed.contains(SlotType.DATETIME))
return SFPVisitorImpl.this.visit(node, data);
this.type = SlotType.DATETIME;
return data;
}
// TBD LEXEME = STRING | SYMBOL, NUMBER = INTEGER | FLOAT
}
class SFPTypeSpecificationVisitor implements SelectiveSFPVisitor {
SlotType type;
// <type-specification> ::= <allowed-type>+ | ?VARIABLE
// void: ( ( AllowedType() )+ | VariableType() )
// <allowed-type> ::= SYMBOL | STRING | LEXEME | INTEGER | FLOAT | NUMBER |
// EXTERNAL-ADDRESS | FACT-ADDRESS | DATETIME
// void: ( SymbolType() | StringType() | DateTimeType() | LexemeType() | IntegerType() |
// FloatType() | NumberType() | BooleanType() )
@Override
public Object visit(SFPTypeSpecification node, Object data) {
if (node.jjtGetNumChildren() != 1)
throw new IllegalArgumentException(
"Restriction of template fields to multiple types is not supported at the moment!");
assert node.jjtGetNumChildren() == 1;
// TBD LEXEME = STRING | SYMBOL, NUMBER = INTEGER | FLOAT
this.type =
sendVisitor(
new SFPTypeVisitor(EnumSet.of(/* SlotType.LEXEME, */SlotType.SYMBOL,
SlotType.STRING, SlotType.DATETIME, SlotType.LONG,
SlotType.DOUBLE/* , SlotType.NUMBER */)), node.jjtGetChild(0),
data).type;
return data;
}
// TBD VariableType
}
class SFPTemplateAttributeVisitor implements SelectiveSFPVisitor {
SlotType slotType;
// <template-attribute> ::= <default-attribute> | <constraint-attribute>
// <LBRACE> ( DefaultAttribute() | DynamicAttribute() | ConstraintAttribute() ) <RBRACE>
// <constraint-attribute> ::= <type-attribute> | <allowed-constant-attribute> |
// <range-attribute> | <cardinality-attribute>
// ( TypeAttribute() | AllowedConstantAttribute() | RangeAttribute() |
// CardinalityAttribute() )
// <type-attribute> ::= (type <type-specification>)
// <TYPE> TypeSpecification()
@Override
public Object visit(SFPTypeAttribute node, Object data) {
assert node.jjtGetNumChildren() == 1;
this.slotType =
sendVisitor(new SFPTypeSpecificationVisitor(), node.jjtGetChild(0), data).type;
return data;
}
}
class SFPSingleSlotDefinitionVisitor implements SelectiveSFPVisitor {
Slot slot;
// <single-slot-definition> ::= ( slot <slot-name> <template-attribute>*)
// <SLOT> ( Symbol() ( TemplateAttribute() )* )
@Override
public Object visit(SFPSingleSlotDefinition node, Object data) {
assert node.jjtGetNumChildren() > 0;
if (node.jjtGetNumChildren() != 2)
throw new IllegalArgumentException(
"For now, slot definitions consist of a name and a type restriction!");
final Symbol name =
sendVisitor(new SFPSymbolVisitor(), node.jjtGetChild(0), data).symbol;
final SlotType type =
sendVisitor(new SFPTemplateAttributeVisitor(), node.jjtGetChild(1), data).slotType;
this.slot = new Slot(type, name.getImage());
return data;
}
}
class SFPDeftemplateConstructElementsVisitor implements SelectiveSFPVisitor {
String comment;
final LinkedList<Slot> slotDefinitions = new LinkedList<>();
// <comment> ::= <string>
@Override
public Object visit(SFPConstructDescription node, Object data) {
assert node.jjtGetNumChildren() == 1;
this.comment = sendVisitor(new SFPStringVisitor(), node.jjtGetChild(0), data).string;
return data;
};
// <slot-definition> ::= <single-slot-definition> | <multislot-definition>
@Override
public Object visit(SFPSlotDefinition node, Object data) {
assert node.jjtGetNumChildren() == 1;
// TBD add support for multislot-definition
this.slotDefinitions.add(sendVisitor(new SFPSingleSlotDefinitionVisitor(),
node.jjtGetChild(0), data).slot);
return data;
};
}
class ConditionalElement {
}
class SFPConditionalElementVisitor implements SelectiveSFPVisitor {
PredicateWithArguments conditionalElement;
// <conditional-element> ::= <pattern-CE> | <assigned-pattern-CE> | <not-CE> | <and-CE> |
// <or-CE> | <logical-CE> | <test-CE> | <exists-CE> | <forall-CE>
// void ConditionalElement() ( ( <LBRACE> ( TemplatePatternCE()| BooleanFunction() |
// LogicalCE() | TestCE() | ExistsCE() | ForallCE() ) <RBRACE> ) | AssignedPatternCE() )
// <template-pattern-CE> ::= (<deftemplate-name> <LHS-slot>*)
// TemplatePatternCE(): ( Symbol() ( (UnorderedLHSFactBody())+ | OrderedLHSFactBody() ) )
@Override
public Object visit(SFPTemplatePatternCE node, Object data) {
// TBD TemplatePatternCE
final Symbol symbol =
sendVisitor(new SFPSymbolVisitor(), node.jjtGetChild(0), data).symbol;
final Template template = SFPVisitorImpl.this.symbolTableTemplates.get(symbol);
final int slotIndex = template.getIndexByName("");
return data;
}
// <logical-CE> ::= (logical <conditional-element>+)
// LogicalCE() : <LOGICAL> ( ConditionalElement() )+
// TBD LogicalCE
@Override
public Object visit(SFPAndFunction node, Object data) {
assert node.jjtGetNumChildren() > 1;
final PredicateWithArguments[] conditionalElements =
stream(node, 0)
.map(n -> sendVisitor(new SFPConditionalElementVisitor(), n, data).conditionalElement)
.filter(c -> null != c).toArray(PredicateWithArguments[]::new);
final Predicate and =
FunctionDictionary.lookupPredicate(
org.jamocha.filter.impls.predicates.And.inClips,
SlotType.nCopies(SlotType.BOOLEAN, conditionalElements.length));
this.conditionalElement = new PredicateWithArgumentsComposite(and, conditionalElements);
return data;
}
@Override
public Object visit(SFPOrFunction node, Object data) {
assert node.jjtGetNumChildren() > 1;
final PredicateWithArguments[] conditionalElements =
stream(node, 0)
.map(n -> sendVisitor(new SFPConditionalElementVisitor(), n, data).conditionalElement)
.filter(c -> null != c).toArray(PredicateWithArguments[]::new);
final Predicate or =
FunctionDictionary.lookupPredicate(
org.jamocha.filter.impls.predicates.Or.inClips,
SlotType.nCopies(SlotType.BOOLEAN, conditionalElements.length));
this.conditionalElement = new PredicateWithArgumentsComposite(or, conditionalElements);
return data;
}
@Override
public Object visit(SFPNotFunction node, Object data) {
assert node.jjtGetNumChildren() == 1;
final PredicateWithArguments conditionalElement =
sendVisitor(new SFPConditionalElementVisitor(), node.jjtGetChild(0), data).conditionalElement;
final Predicate not =
FunctionDictionary.lookupPredicate(
org.jamocha.filter.impls.predicates.Or.inClips, SlotType.BOOLEAN);
this.conditionalElement = new PredicateWithArgumentsComposite(not, conditionalElement);
return data;
}
// <test-CE> ::= (test <function-call>)
// TestCE() : <TEST> FunctionCall()
// TBD TestCE
// <exists-CE> ::= (exists <conditional-element>+)
// ExistsCE() : <EXISTS> ( ConditionalElement() )+
// TBD ExistsCE
// <forall-CE> ::= (forall <conditional-element> <conditional-element>+)
// ForallCE() : <FORALL> ConditionalElement() ( LOOKAHEAD(2) ConditionalElement() )+
// TBD ForallCE
// <assigned-pattern-CE> ::= <single-field-variable> <- <pattern-CE>
// AssignedPatternCE(): ( SingleVariable() <ASSIGN> <LBRACE> TemplatePatternCE() <RBRACE> )
// TBD AssignedPatternCE
}
class SFPDefruleConstructElementVisitor extends SFPConditionalElementVisitor {
String comment;
// TBD ActionList, Declaration
// <defrule-construct> ::= (defrule <rule-name> [<comment>] [<declaration>]
// <conditional-element>* => <expression>*)
// <DEFRULE> Symbol() [ ConstructDescription() ] ( [ LOOKAHEAD(3) Declaration() ] (
// ConditionalElement() )* ) <ARROW> ActionList()
@Override
public Object visit(SFPConstructDescription node, Object data) {
assert node.jjtGetNumChildren() == 1;
this.comment = sendVisitor(new SFPStringVisitor(), node.jjtGetChild(0), data).string;
return data;
}
}
class SFPExpressionVisitor implements SelectiveSFPVisitor {
@Override
public Object visit(SFPExpression node, Object data) {
return data;
}
}
class SFPStartVisitor implements SelectiveSFPVisitor {
// Start() : Construct() | Expression()
// void Construct() : <LBRACE> ( DeftemplateConstruct() | DefglobalConstruct()
// | DefruleConstruct() | DeffunctionConstruct() | DefmoduleConstruct() ) <RBRACE>
// <comment> ::= <string>
@Override
public Object visit(SFPDeftemplateConstruct node, Object data) {
// <deftemplate-construct> ::= (deftemplate <deftemplate-name> [<comment>]
// <slot-definition>*)
// <DEFTEMPLATE> Symbol() [ ConstructDescription() ] ( SlotDefinition() )*
assert node.jjtGetNumChildren() > 0;
final Symbol symbol =
sendVisitor(new SFPSymbolVisitor(), node.jjtGetChild(0), data).symbol;
final SFPDeftemplateConstructElementsVisitor visitor =
new SFPDeftemplateConstructElementsVisitor();
for (int i = 1; i < node.jjtGetNumChildren(); ++i) {
node.jjtGetChild(i).jjtAccept(visitor, data);
}
final String comment = visitor.comment;
final Template template =
new Template(
comment,
visitor.slotDefinitions.toArray(new Slot[visitor.slotDefinitions.size()]));
SFPVisitorImpl.this.symbolTableTemplates.put(symbol, template);
return data;
};
@Override
public Object visit(SFPDefruleConstruct node, Object data) {
// <defrule-construct> ::= (defrule <rule-name> [<comment>] [<declaration>]
// <conditional-element>* => <expression>*)
// <DEFRULE> Symbol() [ ConstructDescription() ] ( [ LOOKAHEAD(3) Declaration() ] (
// ConditionalElement() )* ) <ARROW> ActionList()
assert node.jjtGetNumChildren() > 1;
final Symbol symbol =
sendVisitor(new SFPSymbolVisitor(), node.jjtGetChild(0), data).symbol;
final SFPDefruleConstructElementVisitor visitor =
new SFPDefruleConstructElementVisitor();
for (int i = 1; i < node.jjtGetNumChildren(); ++i) {
node.jjtGetChild(i).jjtAccept(visitor, data);
}
final String comment = visitor.comment;
// final Rule rule = new Rule(visitor.slotDefinitions.toArray(new
// Slot[visitor.slotDefinitions.size()]));
// SFPVisitorImpl.this.symbolTableRules.put(symbol, this.template);
return data;
};
@Override
public Object visit(SFPExpression node, Object data) {
sendVisitor(new SFPExpressionVisitor(), node, data);
return data;
};
}
public static void main(String[] args) {
boolean verbose = (args != null && args.length == 1 && "verbose".equals(args[0]));
if (!verbose)
System.out
.println("Note: For verbose output type \u005c"java Main verbose\u005c".\u005cn");
System.out.print("SFP> ");
SFPParser p = new SFPParser(System.in);
try {
while (true) {
SFPStart n = p.Start();
if (n == null)
System.exit(0);
n.dump(" ");
Object a = n.jjtAccept(new SFPVisitorImpl(), "");
System.out.println(a);
}
} catch (Exception e) {
System.err.println("ERROR[" + e.getClass().getSimpleName() + "]: " + e.getMessage());
// if (verbose)
e.printStackTrace();
}
}
}
| organized imports
| src/org/jamocha/languages/clips/parser/SFPVisitorImpl.java | organized imports | <ide><path>rc/org/jamocha/languages/clips/parser/SFPVisitorImpl.java
<ide> import java.util.EnumSet;
<ide> import java.util.HashMap;
<ide> import java.util.LinkedList;
<del>import java.util.function.IntFunction;
<ide> import java.util.stream.IntStream;
<add>import java.util.stream.Stream;
<ide>
<ide> import org.jamocha.dn.memory.SlotType;
<ide> import org.jamocha.dn.memory.Template;
<ide> import org.jamocha.filter.Predicate;
<ide> import org.jamocha.filter.fwa.PredicateWithArguments;
<ide> import org.jamocha.filter.fwa.PredicateWithArgumentsComposite;
<del>import org.jamocha.languages.clips.parser.SFPVisitorImpl.SFPConditionalElementVisitor;
<ide> import org.jamocha.languages.clips.parser.generated.Node;
<ide> import org.jamocha.languages.clips.parser.generated.SFPAndFunction;
<ide> import org.jamocha.languages.clips.parser.generated.SFPBooleanType; |
|
Java | apache-2.0 | 02da06dd9f2d962e1db85d265b97ac3e269a60fd | 0 | apurtell/hadoop,apache/hadoop,steveloughran/hadoop,mapr/hadoop-common,mapr/hadoop-common,JingchengDu/hadoop,nandakumar131/hadoop,apurtell/hadoop,steveloughran/hadoop,plusplusjiajia/hadoop,wwjiang007/hadoop,lukmajercak/hadoop,lukmajercak/hadoop,lukmajercak/hadoop,JingchengDu/hadoop,apurtell/hadoop,apache/hadoop,steveloughran/hadoop,nandakumar131/hadoop,mapr/hadoop-common,apurtell/hadoop,wwjiang007/hadoop,apurtell/hadoop,mapr/hadoop-common,nandakumar131/hadoop,plusplusjiajia/hadoop,wwjiang007/hadoop,nandakumar131/hadoop,nandakumar131/hadoop,steveloughran/hadoop,mapr/hadoop-common,apache/hadoop,JingchengDu/hadoop,wwjiang007/hadoop,JingchengDu/hadoop,wwjiang007/hadoop,plusplusjiajia/hadoop,JingchengDu/hadoop,apurtell/hadoop,mapr/hadoop-common,steveloughran/hadoop,lukmajercak/hadoop,plusplusjiajia/hadoop,lukmajercak/hadoop,JingchengDu/hadoop,lukmajercak/hadoop,apurtell/hadoop,mapr/hadoop-common,nandakumar131/hadoop,apache/hadoop,nandakumar131/hadoop,steveloughran/hadoop,plusplusjiajia/hadoop,wwjiang007/hadoop,apache/hadoop,lukmajercak/hadoop,apache/hadoop,wwjiang007/hadoop,plusplusjiajia/hadoop,JingchengDu/hadoop,steveloughran/hadoop,apache/hadoop,plusplusjiajia/hadoop | hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/scm/TestContainerSQLCli.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright containerOwnership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ozone.scm;
import org.apache.hadoop.hdds.HddsConfigKeys;
import org.apache.hadoop.hdds.scm.container.ContainerManager;
import org.apache.hadoop.hdds.scm.container.SCMContainerManager;
import org.apache.hadoop.hdds.scm.container.common.helpers.ExcludeList;
import org.apache.hadoop.hdds.scm.events.SCMEvents;
import org.apache.hadoop.hdds.scm.node.NodeManager;
import org.apache.hadoop.hdds.scm.pipeline.PipelineManager;
import org.apache.hadoop.hdds.scm.pipeline.SCMPipelineManager;
import org.apache.hadoop.hdds.server.events.EventQueue;
import org.apache.hadoop.ozone.MiniOzoneCluster;
import org.apache.hadoop.ozone.OzoneConfigKeys;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.scm.block.BlockManagerImpl;
import org.apache.hadoop.hdds.scm.container.placement.algorithms.ContainerPlacementPolicy;
import org.apache.hadoop.hdds.scm.container.placement.algorithms.SCMContainerPlacementCapacity;
import org.apache.hadoop.hdds.scm.ScmConfigKeys;
import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock;
import org.apache.hadoop.ozone.scm.cli.SQLCLI;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.UUID;
import static org.apache.hadoop.ozone.OzoneConsts.SCM_CONTAINER_DB;
import static org.apache.hadoop.ozone.OzoneConsts.KB;
import static org.junit.Assert.assertEquals;
/**
* This class tests the CLI that transforms container into SQLite DB files.
*/
@RunWith(Parameterized.class)
public class TestContainerSQLCli {
private EventQueue eventQueue;
@Parameterized.Parameters
public static Collection<Object[]> data() {
return Arrays.asList(new Object[][] {
{OzoneConfigKeys.OZONE_METADATA_STORE_IMPL_LEVELDB},
{OzoneConfigKeys.OZONE_METADATA_STORE_IMPL_ROCKSDB}
});
}
private static String metaStoreType;
public TestContainerSQLCli(String type) {
metaStoreType = type;
}
private static SQLCLI cli;
private MiniOzoneCluster cluster;
private OzoneConfiguration conf;
private String datanodeIpAddress;
private ContainerManager containerManager;
private NodeManager nodeManager;
private BlockManagerImpl blockManager;
private HashMap<Long, Long> blockContainerMap;
private final static long DEFAULT_BLOCK_SIZE = 4 * KB;
private static HddsProtos.ReplicationFactor factor;
private static HddsProtos.ReplicationType type;
private static final String CONTAINER_OWNER = "OZONE";
@Before
public void setup() throws Exception {
blockContainerMap = new HashMap<>();
conf = new OzoneConfiguration();
conf.setInt(ScmConfigKeys.OZONE_SCM_PIPELINE_OWNER_CONTAINER_COUNT, 2);
conf.setClass(ScmConfigKeys.OZONE_SCM_CONTAINER_PLACEMENT_IMPL_KEY,
SCMContainerPlacementCapacity.class, ContainerPlacementPolicy.class);
if(conf.getBoolean(ScmConfigKeys.DFS_CONTAINER_RATIS_ENABLED_KEY,
ScmConfigKeys.DFS_CONTAINER_RATIS_ENABLED_DEFAULT)){
factor = HddsProtos.ReplicationFactor.THREE;
type = HddsProtos.ReplicationType.RATIS;
} else {
factor = HddsProtos.ReplicationFactor.ONE;
type = HddsProtos.ReplicationType.STAND_ALONE;
}
cluster = MiniOzoneCluster.newBuilder(conf).setNumDatanodes(2).build();
cluster.waitForClusterToBeReady();
datanodeIpAddress = cluster.getHddsDatanodes().get(0)
.getDatanodeDetails().getIpAddress();
cluster.getOzoneManager().stop();
cluster.getStorageContainerManager().stop();
eventQueue = new EventQueue();
nodeManager = cluster.getStorageContainerManager().getScmNodeManager();
PipelineManager pipelineManager =
new SCMPipelineManager(conf, nodeManager, eventQueue);
containerManager = new SCMContainerManager(conf, nodeManager,
pipelineManager, eventQueue);
blockManager =
new BlockManagerImpl(conf, cluster.getStorageContainerManager());
eventQueue.addHandler(SCMEvents.CHILL_MODE_STATUS, blockManager);
eventQueue.fireEvent(SCMEvents.CHILL_MODE_STATUS, false);
GenericTestUtils.waitFor(() -> {
return !blockManager.isScmInChillMode();
}, 10, 1000 * 15);
// blockManager.allocateBlock() will create containers if there is none
// stored in levelDB. The number of containers to create is the value of
// OZONE_SCM_PIPELINE_OWNER_CONTAINER_COUNT which we set to 2.
// so the first allocateBlock() will create two containers. A random one
// is assigned for the block.
// loop until both the two datanodes are up, try up to about 4 seconds.
for (int c = 0; c < 40; c++) {
if (nodeManager.getAllNodes().size() == 2) {
break;
}
Thread.sleep(100);
}
assertEquals(2, nodeManager.getAllNodes().size());
AllocatedBlock ab1 = blockManager.allocateBlock(DEFAULT_BLOCK_SIZE, type,
factor, CONTAINER_OWNER, new ExcludeList());
blockContainerMap.put(ab1.getBlockID().getLocalID(),
ab1.getBlockID().getContainerID());
AllocatedBlock ab2;
// we want the two blocks on the two provisioned containers respectively,
// however blockManager picks containers randomly, keep retry until we
// assign the second block to the other container. This seems to be the only
// way to get the two containers.
// although each retry will create a block and assign to a container. So
// the size of blockContainerMap will vary each time the test is run.
while (true) {
ab2 = blockManager
.allocateBlock(DEFAULT_BLOCK_SIZE, type, factor, CONTAINER_OWNER,
new ExcludeList());
blockContainerMap.put(ab2.getBlockID().getLocalID(),
ab2.getBlockID().getContainerID());
if (ab1.getBlockID().getContainerID() !=
ab2.getBlockID().getContainerID()) {
break;
}
}
blockManager.close();
containerManager.close();
nodeManager.close();
conf.set(OzoneConfigKeys.OZONE_METADATA_STORE_IMPL, metaStoreType);
cli = new SQLCLI(conf);
}
@After
public void shutdown() throws InterruptedException {
if (cluster != null) {
cluster.shutdown();
}
}
@Test
public void testConvertContainerDB() throws Exception {
String dbOutPath = GenericTestUtils.getTempPath(
UUID.randomUUID() + "/out_sql.db");
// TODO : the following will fail due to empty Datanode list, need to fix.
//String dnUUID = cluster.getDataNodes().get(0).getUuid();
String dbRootPath = conf.get(HddsConfigKeys.OZONE_METADATA_DIRS);
String dbPath = dbRootPath + "/" + SCM_CONTAINER_DB;
String[] args = {"-p", dbPath, "-o", dbOutPath};
Connection conn;
String sql;
ResultSet rs;
cli.run(args);
//verify the sqlite db
// only checks the container names are as expected. Because other fields
// such as datanode UUID are generated randomly each time
conn = connectDB(dbOutPath);
sql = "SELECT * FROM containerInfo";
rs = executeQuery(conn, sql);
ArrayList<Long> containerIDs = new ArrayList<>();
while (rs.next()) {
containerIDs.add(rs.getLong("containerID"));
//assertEquals(dnUUID, rs.getString("leaderUUID"));
}
/* TODO: fix this later when the SQLCLI is fixed.
assertTrue(containerIDs.size() == 2 &&
containerIDs.contains(pipeline1.getContainerName()) &&
containerIDs.contains(pipeline2.getContainerName()));
sql = "SELECT * FROM containerMembers";
rs = executeQuery(conn, sql);
containerIDs = new ArrayList<>();
while (rs.next()) {
containerIDs.add(rs.getLong("containerID"));
//assertEquals(dnUUID, rs.getString("datanodeUUID"));
}
assertTrue(containerIDs.size() == 2 &&
containerIDs.contains(pipeline1.getContainerName()) &&
containerIDs.contains(pipeline2.getContainerName()));
sql = "SELECT * FROM datanodeInfo";
rs = executeQuery(conn, sql);
int count = 0;
while (rs.next()) {
assertEquals(datanodeIpAddress, rs.getString("ipAddress"));
//assertEquals(dnUUID, rs.getString("datanodeUUID"));
count += 1;
}
// the two containers maybe on the same datanode, maybe not.
int expected = pipeline1.getLeader().getUuid().equals(
pipeline2.getLeader().getUuid())? 1 : 2;
assertEquals(expected, count);
*/
Files.delete(Paths.get(dbOutPath));
}
private ResultSet executeQuery(Connection conn, String sql)
throws SQLException {
Statement stmt = conn.createStatement();
return stmt.executeQuery(sql);
}
private Connection connectDB(String dbPath) throws Exception {
Class.forName("org.sqlite.JDBC");
String connectPath =
String.format("jdbc:sqlite:%s", dbPath);
return DriverManager.getConnection(connectPath);
}
}
| HDDS-1222. Remove TestContainerSQLCli unit test stub. Contributed by Elek, Marton.
| hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/scm/TestContainerSQLCli.java | HDDS-1222. Remove TestContainerSQLCli unit test stub. Contributed by Elek, Marton. | <ide><path>adoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/scm/TestContainerSQLCli.java
<del>/**
<del> * Licensed to the Apache Software Foundation (ASF) under one
<del> * or more contributor license agreements. See the NOTICE file
<del> * distributed with this work for additional information
<del> * regarding copyright containerOwnership. The ASF licenses this file
<del> * to you under the Apache License, Version 2.0 (the
<del> * "License"); you may not use this file except in compliance
<del> * with the License. You may obtain a copy of the License at
<del> *
<del> * http://www.apache.org/licenses/LICENSE-2.0
<del> *
<del> * Unless required by applicable law or agreed to in writing, software
<del> * distributed under the License is distributed on an "AS IS" BASIS,
<del> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<del> * See the License for the specific language governing permissions and
<del> * limitations under the License.
<del> */
<del>package org.apache.hadoop.ozone.scm;
<del>
<del>import org.apache.hadoop.hdds.HddsConfigKeys;
<del>import org.apache.hadoop.hdds.scm.container.ContainerManager;
<del>import org.apache.hadoop.hdds.scm.container.SCMContainerManager;
<del>import org.apache.hadoop.hdds.scm.container.common.helpers.ExcludeList;
<del>import org.apache.hadoop.hdds.scm.events.SCMEvents;
<del>import org.apache.hadoop.hdds.scm.node.NodeManager;
<del>import org.apache.hadoop.hdds.scm.pipeline.PipelineManager;
<del>import org.apache.hadoop.hdds.scm.pipeline.SCMPipelineManager;
<del>import org.apache.hadoop.hdds.server.events.EventQueue;
<del>import org.apache.hadoop.ozone.MiniOzoneCluster;
<del>import org.apache.hadoop.ozone.OzoneConfigKeys;
<del>import org.apache.hadoop.hdds.conf.OzoneConfiguration;
<del>import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
<del>import org.apache.hadoop.hdds.scm.block.BlockManagerImpl;
<del>import org.apache.hadoop.hdds.scm.container.placement.algorithms.ContainerPlacementPolicy;
<del>import org.apache.hadoop.hdds.scm.container.placement.algorithms.SCMContainerPlacementCapacity;
<del>import org.apache.hadoop.hdds.scm.ScmConfigKeys;
<del>import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock;
<del>import org.apache.hadoop.ozone.scm.cli.SQLCLI;
<del>import org.apache.hadoop.test.GenericTestUtils;
<del>import org.junit.After;
<del>import org.junit.Before;
<del>import org.junit.Test;
<del>import org.junit.runner.RunWith;
<del>import org.junit.runners.Parameterized;
<del>
<del>import java.nio.file.Files;
<del>import java.nio.file.Paths;
<del>import java.sql.Connection;
<del>import java.sql.DriverManager;
<del>import java.sql.ResultSet;
<del>import java.sql.SQLException;
<del>import java.sql.Statement;
<del>import java.util.ArrayList;
<del>import java.util.Arrays;
<del>import java.util.Collection;
<del>import java.util.HashMap;
<del>import java.util.UUID;
<del>
<del>import static org.apache.hadoop.ozone.OzoneConsts.SCM_CONTAINER_DB;
<del>import static org.apache.hadoop.ozone.OzoneConsts.KB;
<del>import static org.junit.Assert.assertEquals;
<del>
<del>/**
<del> * This class tests the CLI that transforms container into SQLite DB files.
<del> */
<del>@RunWith(Parameterized.class)
<del>public class TestContainerSQLCli {
<del>
<del> private EventQueue eventQueue;
<del>
<del> @Parameterized.Parameters
<del> public static Collection<Object[]> data() {
<del> return Arrays.asList(new Object[][] {
<del> {OzoneConfigKeys.OZONE_METADATA_STORE_IMPL_LEVELDB},
<del> {OzoneConfigKeys.OZONE_METADATA_STORE_IMPL_ROCKSDB}
<del> });
<del> }
<del>
<del> private static String metaStoreType;
<del>
<del> public TestContainerSQLCli(String type) {
<del> metaStoreType = type;
<del> }
<del>
<del> private static SQLCLI cli;
<del>
<del> private MiniOzoneCluster cluster;
<del> private OzoneConfiguration conf;
<del> private String datanodeIpAddress;
<del>
<del> private ContainerManager containerManager;
<del> private NodeManager nodeManager;
<del> private BlockManagerImpl blockManager;
<del>
<del> private HashMap<Long, Long> blockContainerMap;
<del>
<del> private final static long DEFAULT_BLOCK_SIZE = 4 * KB;
<del> private static HddsProtos.ReplicationFactor factor;
<del> private static HddsProtos.ReplicationType type;
<del> private static final String CONTAINER_OWNER = "OZONE";
<del>
<del>
<del> @Before
<del> public void setup() throws Exception {
<del> blockContainerMap = new HashMap<>();
<del>
<del> conf = new OzoneConfiguration();
<del> conf.setInt(ScmConfigKeys.OZONE_SCM_PIPELINE_OWNER_CONTAINER_COUNT, 2);
<del> conf.setClass(ScmConfigKeys.OZONE_SCM_CONTAINER_PLACEMENT_IMPL_KEY,
<del> SCMContainerPlacementCapacity.class, ContainerPlacementPolicy.class);
<del> if(conf.getBoolean(ScmConfigKeys.DFS_CONTAINER_RATIS_ENABLED_KEY,
<del> ScmConfigKeys.DFS_CONTAINER_RATIS_ENABLED_DEFAULT)){
<del> factor = HddsProtos.ReplicationFactor.THREE;
<del> type = HddsProtos.ReplicationType.RATIS;
<del> } else {
<del> factor = HddsProtos.ReplicationFactor.ONE;
<del> type = HddsProtos.ReplicationType.STAND_ALONE;
<del> }
<del> cluster = MiniOzoneCluster.newBuilder(conf).setNumDatanodes(2).build();
<del> cluster.waitForClusterToBeReady();
<del> datanodeIpAddress = cluster.getHddsDatanodes().get(0)
<del> .getDatanodeDetails().getIpAddress();
<del> cluster.getOzoneManager().stop();
<del> cluster.getStorageContainerManager().stop();
<del> eventQueue = new EventQueue();
<del> nodeManager = cluster.getStorageContainerManager().getScmNodeManager();
<del> PipelineManager pipelineManager =
<del> new SCMPipelineManager(conf, nodeManager, eventQueue);
<del> containerManager = new SCMContainerManager(conf, nodeManager,
<del> pipelineManager, eventQueue);
<del> blockManager =
<del> new BlockManagerImpl(conf, cluster.getStorageContainerManager());
<del> eventQueue.addHandler(SCMEvents.CHILL_MODE_STATUS, blockManager);
<del> eventQueue.fireEvent(SCMEvents.CHILL_MODE_STATUS, false);
<del> GenericTestUtils.waitFor(() -> {
<del> return !blockManager.isScmInChillMode();
<del> }, 10, 1000 * 15);
<del> // blockManager.allocateBlock() will create containers if there is none
<del> // stored in levelDB. The number of containers to create is the value of
<del> // OZONE_SCM_PIPELINE_OWNER_CONTAINER_COUNT which we set to 2.
<del> // so the first allocateBlock() will create two containers. A random one
<del> // is assigned for the block.
<del>
<del> // loop until both the two datanodes are up, try up to about 4 seconds.
<del> for (int c = 0; c < 40; c++) {
<del> if (nodeManager.getAllNodes().size() == 2) {
<del> break;
<del> }
<del> Thread.sleep(100);
<del> }
<del> assertEquals(2, nodeManager.getAllNodes().size());
<del> AllocatedBlock ab1 = blockManager.allocateBlock(DEFAULT_BLOCK_SIZE, type,
<del> factor, CONTAINER_OWNER, new ExcludeList());
<del> blockContainerMap.put(ab1.getBlockID().getLocalID(),
<del> ab1.getBlockID().getContainerID());
<del>
<del> AllocatedBlock ab2;
<del> // we want the two blocks on the two provisioned containers respectively,
<del> // however blockManager picks containers randomly, keep retry until we
<del> // assign the second block to the other container. This seems to be the only
<del> // way to get the two containers.
<del> // although each retry will create a block and assign to a container. So
<del> // the size of blockContainerMap will vary each time the test is run.
<del> while (true) {
<del> ab2 = blockManager
<del> .allocateBlock(DEFAULT_BLOCK_SIZE, type, factor, CONTAINER_OWNER,
<del> new ExcludeList());
<del> blockContainerMap.put(ab2.getBlockID().getLocalID(),
<del> ab2.getBlockID().getContainerID());
<del> if (ab1.getBlockID().getContainerID() !=
<del> ab2.getBlockID().getContainerID()) {
<del> break;
<del> }
<del> }
<del>
<del> blockManager.close();
<del> containerManager.close();
<del> nodeManager.close();
<del>
<del> conf.set(OzoneConfigKeys.OZONE_METADATA_STORE_IMPL, metaStoreType);
<del> cli = new SQLCLI(conf);
<del>
<del> }
<del>
<del> @After
<del> public void shutdown() throws InterruptedException {
<del> if (cluster != null) {
<del> cluster.shutdown();
<del> }
<del> }
<del>
<del> @Test
<del> public void testConvertContainerDB() throws Exception {
<del> String dbOutPath = GenericTestUtils.getTempPath(
<del> UUID.randomUUID() + "/out_sql.db");
<del> // TODO : the following will fail due to empty Datanode list, need to fix.
<del> //String dnUUID = cluster.getDataNodes().get(0).getUuid();
<del> String dbRootPath = conf.get(HddsConfigKeys.OZONE_METADATA_DIRS);
<del> String dbPath = dbRootPath + "/" + SCM_CONTAINER_DB;
<del> String[] args = {"-p", dbPath, "-o", dbOutPath};
<del> Connection conn;
<del> String sql;
<del> ResultSet rs;
<del>
<del> cli.run(args);
<del>
<del> //verify the sqlite db
<del> // only checks the container names are as expected. Because other fields
<del> // such as datanode UUID are generated randomly each time
<del> conn = connectDB(dbOutPath);
<del> sql = "SELECT * FROM containerInfo";
<del> rs = executeQuery(conn, sql);
<del> ArrayList<Long> containerIDs = new ArrayList<>();
<del> while (rs.next()) {
<del> containerIDs.add(rs.getLong("containerID"));
<del> //assertEquals(dnUUID, rs.getString("leaderUUID"));
<del> }
<del> /* TODO: fix this later when the SQLCLI is fixed.
<del> assertTrue(containerIDs.size() == 2 &&
<del> containerIDs.contains(pipeline1.getContainerName()) &&
<del> containerIDs.contains(pipeline2.getContainerName()));
<del>
<del> sql = "SELECT * FROM containerMembers";
<del> rs = executeQuery(conn, sql);
<del> containerIDs = new ArrayList<>();
<del> while (rs.next()) {
<del> containerIDs.add(rs.getLong("containerID"));
<del> //assertEquals(dnUUID, rs.getString("datanodeUUID"));
<del> }
<del> assertTrue(containerIDs.size() == 2 &&
<del> containerIDs.contains(pipeline1.getContainerName()) &&
<del> containerIDs.contains(pipeline2.getContainerName()));
<del>
<del> sql = "SELECT * FROM datanodeInfo";
<del> rs = executeQuery(conn, sql);
<del> int count = 0;
<del> while (rs.next()) {
<del> assertEquals(datanodeIpAddress, rs.getString("ipAddress"));
<del> //assertEquals(dnUUID, rs.getString("datanodeUUID"));
<del> count += 1;
<del> }
<del> // the two containers maybe on the same datanode, maybe not.
<del> int expected = pipeline1.getLeader().getUuid().equals(
<del> pipeline2.getLeader().getUuid())? 1 : 2;
<del> assertEquals(expected, count);
<del> */
<del> Files.delete(Paths.get(dbOutPath));
<del> }
<del>
<del> private ResultSet executeQuery(Connection conn, String sql)
<del> throws SQLException {
<del> Statement stmt = conn.createStatement();
<del> return stmt.executeQuery(sql);
<del> }
<del>
<del> private Connection connectDB(String dbPath) throws Exception {
<del> Class.forName("org.sqlite.JDBC");
<del> String connectPath =
<del> String.format("jdbc:sqlite:%s", dbPath);
<del> return DriverManager.getConnection(connectPath);
<del> }
<del>} |
||
Java | lgpl-2.1 | dd6f82fc010213007d271e1836bdbdb4d6781d48 | 0 | simoc/mapyrus,simoc/mapyrus,simoc/mapyrus | /*
* This file is part of Mapyrus, software for plotting maps.
* Copyright (C) 2003 Simon Chenery.
*
* Mapyrus is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* Mapyrus is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Mapyrus; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
/*
* @(#) $Id$
*/
package org.mapyrus.dataset;
import java.io.IOException;
import org.mapyrus.Constants;
import org.mapyrus.MapyrusException;
import org.mapyrus.MapyrusMessages;
/**
 * Factory class returning new dataset objects.  Provides a single interface for
 * opening all of the different dataset types.
*/
public class DatasetFactory
{
/**
* Opens a dataset to read geometry from.
	 * @param type is the format of the dataset, for example, "text".
	 * @param name is the name of the dataset to open.
* @param extras are special options for this dataset type such as database connection
* information, or instructions for interpreting data.
*/
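	/*
	 * Example (an illustrative sketch only; the dataset type and the extras
	 * string shown here are assumptions, not taken from a real Mapyrus setup):
	 *
	 *   GeographicDataset dataset = DatasetFactory.open("shapefile",
	 *       "coastline.shp", "");
	 */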
public static GeographicDataset open(String type, String name,
String extras) throws MapyrusException
{
GeographicDataset retval = null;
String errorMessage = null;
/*
* Branch to open dataset, depending on type.
		 * Catch all kinds of dataset opening exceptions here and return
* them all as MapyrusExceptions to avoid exposing higher level code
* to lots of exception types.
*/
try
{
if (type.equalsIgnoreCase("textfile"))
retval = new TextfileDataset(name, extras);
else if (type.equalsIgnoreCase("shapefile"))
retval = new ShapefileDataset(name, extras);
else if (type.equalsIgnoreCase("jdbc"))
retval = new JDBCDataset(name, extras);
else if (type.equalsIgnoreCase("grass"))
retval = new GrassDataset(name, extras);
else
{
throw new MapyrusException(MapyrusMessages.get(MapyrusMessages.INVALID_DATASET_TYPE) +
": " + type);
}
}
catch (IOException e)
{
errorMessage = e.getMessage();
}
if (retval == null)
{
if (errorMessage == null)
errorMessage = "";
throw new MapyrusException(MapyrusMessages.get(MapyrusMessages.OPEN_DATASET_ERROR) +
": " + name + Constants.LINE_SEPARATOR + errorMessage);
}
return(retval);
}
}
| src/org/mapyrus/dataset/DatasetFactory.java | /*
* This file is part of Mapyrus, software for plotting maps.
* Copyright (C) 2003 Simon Chenery.
*
* Mapyrus is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* Mapyrus is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Mapyrus; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
/*
* @(#) $Id$
*/
package org.mapyrus.dataset;
import java.io.IOException;
import org.mapyrus.Constants;
import org.mapyrus.MapyrusException;
import org.mapyrus.MapyrusMessages;
/**
 * Factory class returning new dataset objects.  Provides a single interface for
 * opening all of the different dataset types.
*/
public class DatasetFactory
{
/**
* Opens a dataset to read geometry from.
	 * @param type is the format of the dataset, for example, "text".
	 * @param name is the name of the dataset to open.
* @param extras are special options for this dataset type such as database connection
* information, or instructions for interpreting data.
*/
public static GeographicDataset open(String type, String name,
String extras) throws MapyrusException
{
GeographicDataset retval = null;
String errorMessage = null;
/*
* Branch to open dataset, depending on type.
		 * Catch all kinds of dataset opening exceptions here and return
* them all as MapyrusExceptions to avoid exposing higher level code
* to lots of exception types.
*/
try
{
if (type.equalsIgnoreCase("textfile"))
retval = new TextfileDataset(name, extras);
else if (type.equalsIgnoreCase("shapefile"))
retval = new ShapefileDataset(name, extras);
else if (type.equalsIgnoreCase("jdbc"))
retval = new JDBCDataset(name, extras);
else
{
throw new MapyrusException(MapyrusMessages.get(MapyrusMessages.INVALID_DATASET_TYPE) +
": " + type);
}
}
catch (IOException e)
{
errorMessage = e.getMessage();
}
if (retval == null)
{
if (errorMessage == null)
errorMessage = "";
throw new MapyrusException(MapyrusMessages.get(MapyrusMessages.OPEN_DATASET_ERROR) +
": " + name + Constants.LINE_SEPARATOR + errorMessage);
}
return(retval);
}
}
| Add "grass" format.
| src/org/mapyrus/dataset/DatasetFactory.java | Add "grass" format. | <ide><path>rc/org/mapyrus/dataset/DatasetFactory.java
<ide> retval = new ShapefileDataset(name, extras);
<ide> else if (type.equalsIgnoreCase("jdbc"))
<ide> retval = new JDBCDataset(name, extras);
<add> else if (type.equalsIgnoreCase("grass"))
<add> retval = new GrassDataset(name, extras);
<ide> else
<ide> {
<ide> throw new MapyrusException(MapyrusMessages.get(MapyrusMessages.INVALID_DATASET_TYPE) + |
|
JavaScript | mit | 8108205489005a9d7ceb4064b4f8a9e0eed12fe8 | 0 | avinoamr/pluto,avinoamr/pluto | (function() {
/**
* PLUTO
* Native HTML Template Rendering
*
*
* The MIT License (MIT)
* Copyright (c) 2013 Roi Avinoam
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
class Template extends HTMLTemplateElement {
render(obj) {
var compiled = this._compiled || (this._compiled = {})
if (compiled.html !== this.innerHTML) { // recompile
// console.log('RECOMPILE', this) // bad - nested cloned templates are re-compiled on every item
var content = this.cloneNode(true).content
Object.assign(compiled, this.compile(content), {
html: this.innerHTML
})
}
var { content, exprs, items } = compiled
return new Renderer(content, exprs, items).render(obj)
}
compile(content) {
if (content.nodeName !== '#document-fragment') {
var doc = new DocumentFragment()
doc.appendChild(content.cloneNode(true))
content = doc
}
var exprs = []
var elements = [{ el: content, path: [] }]
while (elements.length > 0) {
var { el, path } = elements.shift()
exprs = exprs.concat(this._compileEl(el, path))
// enqueue children
el.childNodes.forEach(function(el, i) {
elements.push({ el, path: path.concat(['childNodes', i]) })
})
}
        // we opt to compile the repeat/cond expressions separately from the
        // rest of this template - because (a) the template might rely on a
        // repeated ${item} property that doesn't yet exist in the repeat
        // expression, and (b) it's much smaller/faster than the complete
// expressions list.
// NB: It might not be that beneficial for cond though.
exprs = Object.assign(exprs, { eval: compileExpressions(exprs) })
return { content, exprs }
}
_compileEl(el, path) {
var exprs = []
// inner content expressions
if (el.nodeName === '#text') {
var expr = isExpressions(el.textContent)
if (expr) {
el.textContent = ''
var render = (el, v) => (el.textContent = v || '')
exprs.push({ expr, path, render })
}
}
// attributes
for (var attr of el.attributes || []) {
var expr = isExpressions(attr.value)
if (expr === null) {
continue
}
            // hide expressions from the imported templates
attr = attr.name
el.removeAttribute(attr)
var render
if (attr.startsWith('on-')) {
render = this._renderEvent(attr.slice(3)) // trim 'on-'
} else if (attr === 'class') {
render = this._renderClass()
} else if (attr === 'style') {
render = this._renderStyle()
} else if (attr === 'else') {
render = this._renderElse(el)
} else if (attr === 'repeat') {
render = this._renderRepeat(el, 'repeat')
} else if (attr === 'if') {
render = this._renderIf(el)
} else {
render = this._renderProp(snakeToCamelCase(attr))
}
exprs.push({ expr, path, attr, render })
if (render.__stopCompilation) {
// some directives (for) may require to stop the compilation as
// they handle the rest of it internally
break
}
}
return exprs
}
_renderProp(prop) {
return (el, v) => el[prop] = v
}
_renderClass() {
return function(el, v) {
if (typeof v === 'object') {
if (!Array.isArray(v)) {
v = Object.keys(v).filter((k) => v[k])
}
                v = v.join(' ')
}
el.className = v
}
}
_renderStyle() {
return (el, v) => typeof v === 'object'
? Object.assign(el.style, v)
: el.setAttribute('style', v)
}
_renderEvent(evName) {
return function(el, v) {
var evs = el.__plutoEvs || (el.__plutoEvs = {})
if (evs[evName] !== v) {
el.removeEventListener(evName, evs[evName])
el.addEventListener(evName, evs[evName] = v)
}
}
}
_renderElse(el, k) {
var { content, exprs } = this.compile(el.content || el);
el.replaceWith(new RepeatedNode())
el.innerHTML = ''
return Object.assign(function(el, v, obj) {
var isInited = el instanceof RepeatedNode
if (!isInited) {
Object.setPrototypeOf(el, RepeatedNode.prototype)
el.content = content
el.exprs = exprs
}
el.obj = obj
el.repeat = obj.__plutoElse ? [] : [obj.item]
}, { __stopCompilation: true })
}
_renderIf(el, k) {
var { content, exprs } = this.compile(el.content || el);
el.replaceWith(new RepeatedNode())
el.innerHTML = ''
return Object.assign(function(el, v, obj) {
var isInited = el instanceof RepeatedNode
if (!isInited) {
Object.setPrototypeOf(el, RepeatedNode.prototype)
el.content = content
el.exprs = exprs
}
obj.__plutoElse = v
el.obj = obj
el.repeat = v ? [obj.item] : []
}, { __stopCompilation: true })
}
_renderRepeat(el, k) {
var { content, exprs } = this.compile(el.content || el);
el.replaceWith(new RepeatedNode())
el.innerHTML = ''
return Object.assign(function(el, v, obj) {
var isInited = el instanceof RepeatedNode
if (!isInited) {
Object.setPrototypeOf(el, RepeatedNode.prototype)
el.content = content
el.exprs = exprs
}
obj.__plutoElse = v.length
el.obj = obj
el[k] = v
}, { __stopCompilation: true })
}
}
class RepeatedNode extends Text {
remove() {
this.repeat = [] // force the removal of the individual sub-nodes
Text.prototype.remove.apply(this, arguments)
}
set repeat(items) {
this.__items || (this.__items = [])
if (typeof items === 'object' && !Array.isArray(items)) {
items = Object.keys(items).map(function(k) {
return { key: k, value: items[k] }
})
} else if (typeof items === 'boolean') {
items = items ? [this.obj.item] : [] // 0 or 1
} else if (typeof items === 'number') {
items = new Array(items) // range-items, repeat N times.
items = Array.from(items).map(() => this.obj.item)
}
// remove obsolete items
while (this.__items.length > items.length) {
this.__items.pop().remove()
}
// update existing items
for (var i = 0; i < this.__items.length; i += 1) {
this.obj.item = items[i]
this.__items[i].render(this.obj)
}
// create new items
while (this.__items.length < items.length) {
var i = this.__items.length
this.obj.item = items[i]
var doc = new Renderer(this.content, this.exprs).render(this.obj)
this.__items.push(doc)
this.before(doc)
}
}
}
class Renderer extends DocumentFragment {
constructor(content, exprs, items) {
super()
this.exprs = exprs
this.appendChild(document.importNode(content, true))
// copy the list of generated elements from the template in order
// to support removals
this.elements = [].map.call(this.childNodes, child => child)
this.paths = this.exprs.map((expr) => select(this, expr.path))
}
remove() {
while (this.elements.length > 0) {
this.elements.pop().remove()
}
}
render(obj) {
var else_ = obj.__plutoElse
obj.__plutoElse = false
var values = this.exprs.eval(obj)
for (var i = 0 ; i < this.exprs.length ; i += 1) {
var expr = this.exprs[i]
var el = this.paths[i]
var v = values[i]
expr.render(el, v, obj)
}
        var ev = new Event('pluto-rendered', { bubbles: false })
for (var el of this.paths) {
el.dispatchEvent(ev)
}
obj.__plutoElse = else_
return this
}
}
// -- HELPER FUNCTIONS
// Searches for an element from root based on the property-path to the child
// example: root = <body>, path = childNodes.3.childNodes.7. Resolved by walking
// the path down to the child.
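// e.g. (illustrative): select(root, ['childNodes', 3, 'childNodes', 7]) resolves
// to roughly root.childNodes[3].childNodes[7].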
function select(root, path) {
var el = root
for (var i = 0; el !== undefined && i < path.length; i += 1) {
el = el[path[i]]
}
return el
}
const SNAKE_RE = /-([a-z])/g
function snakeToCamelCase(s) {
return s.replace(SNAKE_RE, g => g[1].toUpperCase())
}
// extract expressions in template-literal syntax out of a string
var EXPR_RE = /\$\{[^\}]*\}/
function isExpressions(s) {
return EXPR_RE.test(s) ? s : null
}
// compile a list of template-literal expressions into a function that evaluates
// these expressions for the provided input object
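// e.g. (illustrative only): for exprs = [{ expr: '${name}!' }] the returned
// evaluator maps { name: 'pluto' } to ['pluto!'].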
function compileExpressions(exprs) {
var refs = []
    var code = exprs.map(function(expr, i) {
if (expr.expr === undefined) {
return '' // can happen in nested templates
}
if (expr.expr) {
refs = refs.concat(getIdentifiers(expr.expr))
}
return `arguments[0][${i}] = T\`${expr.expr}\``
}).join(';\n')
// define the local variables and bind root-level functions to the provided
// rendered object.
var keys = refs.reduce((keys, k) => (keys[k] = true, keys), {})
var locals = Object.keys(keys).map(function (k) {
return `
var ${k} = this["${k}"];
typeof ${k} === 'function' && (${k} = bindFn(this, ${k}))
`
}).join(';\n')
var fn = eval('(function () {\n' + locals + '\n' + code + '\n})')
return function(obj) {
var res = []
try {
fn.call(obj, res)
} catch (e) {
console.warn(fn)
throw e
}
return res
}
function bindFn(obj, fn) {
var bound = fn.__plutoBound || (fn.__plutoBound = {})
if (bound.to !== obj) {
bound.to = obj
bound.fn = fn.bind(obj)
}
return bound.fn
}
function T(s, v) {
return arguments.length > 2 || typeof v === 'string'
? String.raw.apply(null, arguments)
: v
}
}
// generate the list of identifiers found in the code.
function getIdentifiers(expr) {
var re = /[$A-Z_][0-9A-Z_$]*/ig
var whitespace = ' \n\r\t'
var disallowed = '\'\".'
var skip = ['true', 'false', 'if', 'for', 'while', 'do', 'try', 'catch',
'break', 'continue', 'switch', 'throw', 'this', 'instanceof', 'in',
'function', 'delete', 'default', 'case', 'debugger', 'const', 'var',
'with', 'typeof', 'super', 'class', 'new', 'null', 'return', 'let',
'import', 'else', 'enum', 'extends', 'finally', '$']
// We first match for the valid identifier, and then check the previous
    // non-whitespace character preceding the identifier to verify that it's
// not a string or nested element.
var refs = {}
var match
while (match = re.exec(expr)) {
if (skip.indexOf(match[0]) !== -1) {
continue // skipped or reserved keyword
}
if (window[match[0]] !== undefined) {
continue // keep global functions (Object, Array, etc.)
}
var lastChar = undefined
do {
match.index -= 1
if (whitespace.indexOf(expr[match.index]) === -1) {
lastChar = expr[match.index]
}
} while (match.index > -1 && !lastChar)
if (disallowed.indexOf(lastChar) === -1) {
refs[match[0]] = true
}
}
return Object.keys(refs)
}
// upgrade an element
// NOTE this will be unnecessary with customElements
function pluto(el) {
if (typeof el === 'string') {
// support HTML-imported selections
var doc = document._currentScript // used by pollyfills
|| document.currentScript // native.
|| document // not an import.
el = (doc.ownerDocument || doc).querySelector(el)
}
return !el || el instanceof Template
? el
: Object.setPrototypeOf(el, Template.prototype)
}
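// Usage sketch (illustrative only; assumes a <template id="tpl"> element with
// ${...} expressions exists in the page):
//
//   var tpl = pluto('#tpl')
//   var view = tpl.render({ name: 'pluto' })
//   document.body.appendChild(view)
//   view.render({ name: 'pluto again' }) // later calls re-render in place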
pluto.Template = Template
pluto.RepeatRenderer = Renderer
window.pluto = pluto
})();
(function() {
// placed here in order to have its own scope clear of any of the pluto
// local variables.
pluto._eval = function(code) {
return eval(code)
}
})();
| pluto.js | (function() {
/**
* PLUTO
* Native HTML Template Rendering
*
*
* The MIT License (MIT)
* Copyright (c) 2013 Roi Avinoam
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
class Template extends HTMLTemplateElement {
render(obj) {
var compiled = this._compiled || (this._compiled = {})
if (compiled.html !== this.innerHTML) { // recompile
// console.log('RECOMPILE', this) // bad - nested cloned templates are re-compiled on every item
var content = this.cloneNode(true).content
Object.assign(compiled, this.compile(content), {
html: this.innerHTML
})
}
var { content, exprs, items } = compiled
return new Renderer(content, exprs, items).render(obj)
}
_renderIn(obj, el) {
if (el.__plutoRenderer) {
el.__plutoRenderer.render(obj)
} else {
var doc = this.render(obj)
el.replaceWith(doc)
el.__plutoRenderer = doc
el.remove = doc.remove.bind(doc)
}
}
compile(content) {
if (content.nodeName !== '#document-fragment') {
var doc = new DocumentFragment()
doc.appendChild(content.cloneNode(true))
content = doc
}
var exprs = []
var elements = [{ el: content, path: [] }]
while (elements.length > 0) {
var { el, path } = elements.shift()
exprs = exprs.concat(this._compileEl(el, path))
// enqueue children
el.childNodes.forEach(function(el, i) {
elements.push({ el, path: path.concat(['childNodes', i]) })
})
}
        // we opt to compile the repeat/cond expressions separately from the
        // rest of this template - because (a) the template might rely on a
        // repeated ${item} property that doesn't yet exist in the repeat
        // expression, and (b) it's much smaller/faster than the complete
// expressions list.
// NB: It might not be that beneficial for cond though.
exprs = Object.assign(exprs, { eval: compileExpressions(exprs) })
return { content, exprs }
}
_compileEl(el, path) {
var exprs = []
// inner content expressions
if (el.nodeName === '#text') {
var expr = isExpressions(el.textContent)
if (expr) {
el.textContent = ''
var render = (el, v) => (el.textContent = v || '')
exprs.push({ expr, path, render })
}
}
// attributes
for (var attr of el.attributes || []) {
var expr = isExpressions(attr.value)
if (expr === null) {
continue
}
            // hide expressions from the imported templates
attr = attr.name
el.removeAttribute(attr)
var render
if (attr.startsWith('on-')) {
render = this._renderEvent(attr.slice(3)) // trim 'on-'
} else if (attr === 'class') {
render = this._renderClass()
} else if (attr === 'style') {
render = this._renderStyle()
} else if (attr === 'else') {
render = this._renderElse(el)
} else if (attr === 'repeat') {
render = this._renderRepeat(el, 'repeat')
} else if (attr === 'if') {
render = this._renderIf(el)
} else {
render = this._renderProp(snakeToCamelCase(attr))
}
exprs.push({ expr, path, attr, render })
if (render.__stopCompilation) {
// some directives (for) may require to stop the compilation as
// they handle the rest of it internally
break
}
}
return exprs
}
_renderProp(prop) {
return (el, v) => el[prop] = v
}
_renderClass() {
return function(el, v) {
if (typeof v === 'object') {
if (!Array.isArray(v)) {
v = Object.keys(v).filter((k) => v[k])
}
return v.join(' ')
}
el.className = v
}
}
_renderStyle() {
return (el, v) => typeof v === 'object'
? Object.assign(el.style, v)
: el.setAttribute('style', v)
}
_renderEvent(evName) {
return function(el, v) {
var evs = el.__plutoEvs || (el.__plutoEvs = {})
if (evs[evName] !== v) {
el.removeEventListener(evName, evs[evName])
el.addEventListener(evName, evs[evName] = v)
}
}
}
_renderElse(el, k) {
var { content, exprs } = this.compile(el.content || el);
el.replaceWith(new RepeatedNode())
el.innerHTML = ''
return Object.assign(function(el, v, obj) {
var isInited = el instanceof RepeatedNode
if (!isInited) {
Object.setPrototypeOf(el, RepeatedNode.prototype)
el.content = content
el.exprs = exprs
}
el.obj = obj
el.repeat = obj.__plutoElse ? [] : [obj.item]
}, { __stopCompilation: true })
}
_renderIf(el, k) {
var { content, exprs } = this.compile(el.content || el);
el.replaceWith(new RepeatedNode())
el.innerHTML = ''
return Object.assign(function(el, v, obj) {
var isInited = el instanceof RepeatedNode
if (!isInited) {
Object.setPrototypeOf(el, RepeatedNode.prototype)
el.content = content
el.exprs = exprs
}
obj.__plutoElse = v
el.obj = obj
el.repeat = v ? [obj.item] : []
}, { __stopCompilation: true })
}
_renderRepeat(el, k) {
var { content, exprs } = this.compile(el.content || el);
el.replaceWith(new RepeatedNode())
el.innerHTML = ''
return Object.assign(function(el, v, obj) {
var isInited = el instanceof RepeatedNode
if (!isInited) {
Object.setPrototypeOf(el, RepeatedNode.prototype)
el.content = content
el.exprs = exprs
}
obj.__plutoElse = v.length
el.obj = obj
el[k] = v
}, { __stopCompilation: true })
}
}
class RepeatedNode extends Text {
remove() {
this.repeat = [] // force the removal of the individual sub-nodes
Text.prototype.remove.apply(this, arguments)
}
set repeat(items) {
this.__items || (this.__items = [])
if (typeof items === 'object' && !Array.isArray(items)) {
items = Object.keys(items).map(function(k) {
return { key: k, value: items[k] }
})
} else if (typeof items === 'boolean') {
items = items ? [this.obj.item] : [] // 0 or 1
} else if (typeof items === 'number') {
items = new Array(items) // range-items, repeat N times.
items = Array.from(items).map(() => this.obj.item)
}
// remove obsolete items
while (this.__items.length > items.length) {
this.__items.pop().remove()
}
// update existing items
for (var i = 0; i < this.__items.length; i += 1) {
this.obj.item = items[i]
this.__items[i].render(this.obj)
}
// create new items
while (this.__items.length < items.length) {
var i = this.__items.length
this.obj.item = items[i]
var doc = new Renderer(this.content, this.exprs).render(this.obj)
this.__items.push(doc)
this.before(doc)
}
}
}
class Renderer extends DocumentFragment {
constructor(content, exprs, items) {
super()
this.exprs = exprs
this.appendChild(document.importNode(content, true))
// copy the list of generated elements from the template in order
// to support removals
this.elements = [].map.call(this.childNodes, child => child)
this.paths = this.exprs.map((expr) => select(this, expr.path))
}
remove() {
while (this.elements.length > 0) {
this.elements.pop().remove()
}
}
render(obj) {
var else_ = obj.__plutoElse
obj.__plutoElse = false
var values = this.exprs.eval(obj)
for (var i = 0 ; i < this.exprs.length ; i += 1) {
var expr = this.exprs[i]
var el = this.paths[i]
var v = values[i]
expr.render(el, v, obj)
}
        var ev = new Event('pluto-rendered', { bubbles: false })
for (var el of this.paths) {
el.dispatchEvent(ev)
}
obj.__plutoElse = else_
return this
}
}
// -- HELPER FUNCTIONS
// Searches for an element from root based on the property-path to the child
// example: root = <body>, path = childNodes.3.childNodes.7. Resolved by walking
// the path down to the child.
function select(root, path) {
var el = root
for (var i = 0; el !== undefined && i < path.length; i += 1) {
el = el[path[i]]
}
return el
}
const SNAKE_RE = /-([a-z])/g
function snakeToCamelCase(s) {
return s.replace(SNAKE_RE, g => g[1].toUpperCase())
}
// extract expressions in template-literal syntax out of a string
var EXPR_RE = /\$\{[^\}]*\}/
function isExpressions(s) {
return EXPR_RE.test(s) ? s : null
}
// compile a list of template-literal expressions into a function that evaluates
// these expressions for the provided input object
function compileExpressions(exprs) {
var refs = []
    var code = exprs.map(function(expr, i) {
if (expr.expr === undefined) {
return '' // can happen in nested templates
}
if (expr.expr) {
refs = refs.concat(getIdentifiers(expr.expr))
}
return `arguments[0][${i}] = T\`${expr.expr}\``
}).join(';\n')
// define the local variables and bind root-level functions to the provided
// rendered object.
var keys = refs.reduce((keys, k) => (keys[k] = true, keys), {})
var locals = Object.keys(keys).map(function (k) {
return `
var ${k} = this["${k}"];
typeof ${k} === 'function' && (${k} = bindFn(this, ${k}))
`
}).join(';\n')
var fn = eval('(function () {\n' + locals + '\n' + code + '\n})')
return function(obj) {
var res = []
try {
fn.call(obj, res)
} catch (e) {
console.warn(fn)
throw e
}
return res
}
function bindFn(obj, fn) {
var bound = fn.__plutoBound || (fn.__plutoBound = {})
if (bound.to !== obj) {
bound.to = obj
bound.fn = fn.bind(obj)
}
return bound.fn
}
function T(s, v) {
return arguments.length > 2 || typeof v === 'string'
? String.raw.apply(null, arguments)
: v
}
}
// generate the list of identifiers found in the code.
function getIdentifiers(expr) {
var re = /[$A-Z_][0-9A-Z_$]*/ig
var whitespace = ' \n\r\t'
var disallowed = '\'\".'
var skip = ['true', 'false', 'if', 'for', 'while', 'do', 'try', 'catch',
'break', 'continue', 'switch', 'throw', 'this', 'instanceof', 'in',
'function', 'delete', 'default', 'case', 'debugger', 'const', 'var',
'with', 'typeof', 'super', 'class', 'new', 'null', 'return', 'let',
'import', 'else', 'enum', 'extends', 'finally', '$']
// We first match for the valid identifier, and then check the previous
    // non-whitespace character preceding the identifier to verify that it's
// not a string or nested element.
var refs = {}
var match
while (match = re.exec(expr)) {
if (skip.indexOf(match[0]) !== -1) {
continue // skipped or reserved keyword
}
if (window[match[0]] !== undefined) {
continue // keep global functions (Object, Array, etc.)
}
var lastChar = undefined
do {
match.index -= 1
if (whitespace.indexOf(expr[match.index]) === -1) {
lastChar = expr[match.index]
}
} while (match.index > -1 && !lastChar)
if (disallowed.indexOf(lastChar) === -1) {
refs[match[0]] = true
}
}
return Object.keys(refs)
}
// upgrade an element
// NOTE this will be unnecessary with customElements
function pluto(el) {
if (typeof el === 'string') {
// support HTML-imported selections
var doc = document._currentScript // used by pollyfills
|| document.currentScript // native.
|| document // not an import.
el = (doc.ownerDocument || doc).querySelector(el)
}
return !el || el instanceof Template
? el
: Object.setPrototypeOf(el, Template.prototype)
}
pluto.Template = Template
pluto.RepeatRenderer = Renderer
window.pluto = pluto
})();
(function() {
// placed here in order to have its own scope clear of any of the pluto
// local variables.
pluto._eval = function(code) {
return eval(code)
}
})();
| Remove obsolete code
| pluto.js | Remove obsolete code | <ide><path>luto.js
<ide> return new Renderer(content, exprs, items).render(obj)
<ide> }
<ide>
<del> _renderIn(obj, el) {
<del> if (el.__plutoRenderer) {
<del> el.__plutoRenderer.render(obj)
<del> } else {
<del> var doc = this.render(obj)
<del> el.replaceWith(doc)
<del> el.__plutoRenderer = doc
<del> el.remove = doc.remove.bind(doc)
<del> }
<del> }
<del>
<ide> compile(content) {
<ide> if (content.nodeName !== '#document-fragment') {
<ide> var doc = new DocumentFragment() |
|
Java | apache-2.0 | e71ae792234fee885cdfef21345e2b972a0bea5d | 0 | alien11689/aries,graben/aries,rotty3000/aries,apache/aries,rotty3000/aries,alien11689/aries,graben/aries,alien11689/aries,rotty3000/aries,graben/aries,graben/aries,apache/aries,rotty3000/aries,apache/aries,alien11689/aries,apache/aries | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly;
import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.security.ProtectionDomain;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Dictionary;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.List;
import org.apache.aries.spifly.api.SpiFlyConstants;
import org.easymock.EasyMock;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.osgi.framework.BundleReference;
import org.osgi.framework.Version;
import org.osgi.framework.hooks.weaving.WeavingHook;
import org.osgi.framework.hooks.weaving.WovenClass;
import org.osgi.framework.wiring.BundleWiring;
public class ClientWeavingHookTest {
@Before
public void setUp() {
Activator.activator = new Activator();
}
@After
public void tearDown() {
Activator.activator = null;
}
@Test
    public void testClientWeavingHookBasicServiceLoaderUsage() throws Exception {
Dictionary<String, String> consumerHeaders = new Hashtable<String, String>();
consumerHeaders.put(SpiFlyConstants.SPI_CONSUMER_HEADER, "true");
// Register the bundle that provides the SPI implementation.
Bundle providerBundle = mockProviderBundle("impl1", 1, "META-INF/services/org.apache.aries.mytest.MySPI");
Activator.activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle);
Bundle consumerBundle = mockConsumerBundle(consumerHeaders, providerBundle);
Bundle spiFlyBundle = mockSpiFlyBundle("spifly", Version.parseVersion("1.9.4"), consumerBundle, providerBundle);
WeavingHook wh = new ClientWeavingHook(spiFlyBundle.getBundleContext());
// Weave the TestClient class.
URL clsUrl = getClass().getResource("TestClient.class");
Assert.assertNotNull("Precondition", clsUrl);
WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.TestClient", consumerBundle);
Assert.assertEquals("Precondition", 0, wc.getDynamicImports().size());
wh.weave(wc);
Assert.assertEquals(1, wc.getDynamicImports().size());
String di1 = "org.apache.aries.spifly;bundle-symbolic-name=spifly;bundle-version=1.9.4";
String di2 = "org.apache.aries.spifly;bundle-version=1.9.4;bundle-symbolic-name=spifly";
String di = wc.getDynamicImports().get(0);
Assert.assertTrue("Weaving should have added a dynamic import", di1.equals(di) || di2.equals(di));
        // Invoke the woven class and check that it properly sets the TCCL so that the
// META-INF/services/org.apache.aries.mytest.MySPI file from impl1 is visible.
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {String.class});
Object result = method.invoke(cls.newInstance(), "hello");
Assert.assertEquals("olleh", result);
}
@Test
public void testClientWeavingHookAltServiceLoaderLoadUnprocessed() throws Exception {
Bundle spiFlyBundle = mockSpiFlyBundle();
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER, "true");
Bundle consumerBundle = mockConsumerBundle(headers, spiFlyBundle);
WeavingHook wh = new ClientWeavingHook(spiFlyBundle.getBundleContext());
// Weave the TestClient class.
URL clsUrl = getClass().getResource("UnaffectedTestClient.class");
Assert.assertNotNull("Precondition", clsUrl);
WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.UnaffectedTestClient", consumerBundle);
Assert.assertEquals("Precondition", 0, wc.getDynamicImports().size());
wh.weave(wc);
Assert.assertEquals("The client is not affected so no additional imports should have been added",
0, wc.getDynamicImports().size());
// ok the weaving is done, now prepare the registry for the call
Bundle providerBundle = mockProviderBundle("impl1", 1, "META-INF/services/org.apache.aries.mytest.MySPI");
Activator.activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle);
        // Invoke the woven class and check that it properly sets the TCCL so that the
// META-INF/services/org.apache.aries.mytest.MySPI file from impl1 is visible.
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {String.class});
Object result = method.invoke(cls.newInstance(), "hello");
Assert.assertEquals("impl4", result);
}
@Test
public void testClientWeavingHookMultipleProviders() throws Exception {
Bundle spiFlyBundle = mockSpiFlyBundle();
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER, "true");
Bundle consumerBundle = mockConsumerBundle(headers, spiFlyBundle);
WeavingHook wh = new ClientWeavingHook(spiFlyBundle.getBundleContext());
// Weave the TestClient class.
URL clsUrl = getClass().getResource("TestClient.class");
WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.TestClient", consumerBundle);
wh.weave(wc);
Bundle providerBundle1 = mockProviderBundle("impl1", 1, "META-INF/services/org.apache.aries.mytest.MySPI");
Bundle providerBundle2 = mockProviderBundle("impl2", 2, "META-INF/services/org.apache.aries.mytest.MySPI");
// Register in reverse order to make sure the order in which bundles are sorted is correct
Activator.activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle2);
Activator.activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle1);
        // Invoke the woven class and check that it properly sets the TCCL so that the
// META-INF/services/org.apache.aries.mytest.MySPI files from impl1 and impl2 are visible.
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {String.class});
Object result = method.invoke(cls.newInstance(), "hello");
Assert.assertEquals("All three services should be invoked in the correct order", "ollehHELLO5", result);
}
@Test
public void testClientSpecifyingProvider() throws Exception {
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER, "java.util.ServiceLoader#load(java.lang.Class);bundle=impl2");
Bundle providerBundle1 = mockProviderBundle("impl1", 1, "META-INF/services/org.apache.aries.mytest.MySPI");
Bundle providerBundle2 = mockProviderBundle("impl2", 2, "META-INF/services/org.apache.aries.mytest.MySPI");
Activator.activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle1);
Activator.activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle2);
Bundle consumerBundle = mockConsumerBundle(headers, providerBundle1, providerBundle2);
Bundle spiFlyBundle = mockSpiFlyBundle(consumerBundle, providerBundle1, providerBundle2);
WeavingHook wh = new ClientWeavingHook(spiFlyBundle.getBundleContext());
// Weave the TestClient class.
URL clsUrl = getClass().getResource("TestClient.class");
WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.TestClient", consumerBundle);
wh.weave(wc);
        // Invoke the woven class and check that it properly sets the TCCL so that the
// META-INF/services/org.apache.aries.mytest.MySPI file from impl2 is visible.
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {String.class});
Object result = method.invoke(cls.newInstance(), "hello");
Assert.assertEquals("Only the services from bundle impl2 should be selected", "HELLO5", result);
}
@Test
public void testClientSpecifyingProviderVersion() throws Exception {
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER, "java.util.ServiceLoader#load(java.lang.Class);bundle=impl2;version=1.2.3");
Bundle providerBundle1 = mockProviderBundle("impl1", 1, "META-INF/services/org.apache.aries.mytest.MySPI");
Bundle providerBundle2 = mockProviderBundle("impl2", 2, "META-INF/services/org.apache.aries.mytest.MySPI");
Bundle providerBundle3 = mockProviderBundle("impl2", 3, new Version(1, 2, 3), "META-INF/services/org.apache.aries.mytest.MySPI");
Activator.activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle1);
Activator.activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle2);
Activator.activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle3);
Bundle consumerBundle = mockConsumerBundle(headers, providerBundle1, providerBundle2, providerBundle3);
Bundle spiFlyBundle = mockSpiFlyBundle(consumerBundle, providerBundle1, providerBundle2, providerBundle3);
WeavingHook wh = new ClientWeavingHook(spiFlyBundle.getBundleContext());
// Weave the TestClient class.
URL clsUrl = getClass().getResource("TestClient.class");
WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.TestClient", consumerBundle);
wh.weave(wc);
        // Invoke the woven class and check that it properly sets the TCCL so that the
// META-INF/services/org.apache.aries.mytest.MySPI file from impl2 is visible.
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {String.class});
Object result = method.invoke(cls.newInstance(), "hello");
Assert.assertEquals("Only the services from bundle impl2 should be selected", "Updated!Hello!Updated", result);
}
@Test
public void testClientSpecificProviderLoadArgument() throws Exception {
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER,
"java.util.ServiceLoader#load(java.lang.Class[org.apache.aries.mytest.MySPI])," +
"java.util.ServiceLoader#load(java.lang.Class[org.apache.aries.mytest.AltSPI]);bundle=impl4");
Bundle providerBundle1 = mockProviderBundle("impl1", 1, "META-INF/services/org.apache.aries.mytest.MySPI");
Bundle providerBundle2 = mockProviderBundle("impl2", 2, "META-INF/services/org.apache.aries.mytest.MySPI", "META-INF/services/org.apache.aries.mytest.AltSPI");
Bundle providerBundle4 = mockProviderBundle("impl4", 4, "META-INF/services/org.apache.aries.mytest.MySPI", "META-INF/services/org.apache.aries.mytest.AltSPI");
Activator.activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle1);
Activator.activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle2);
Activator.activator.registerProviderBundle("org.apache.aries.mytest.AltSPI", providerBundle2);
Activator.activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle4);
Activator.activator.registerProviderBundle("org.apache.aries.mytest.AltSPI", providerBundle4);
Bundle consumerBundle = mockConsumerBundle(headers, providerBundle1, providerBundle2, providerBundle4);
Bundle spiFlyBundle = mockSpiFlyBundle(consumerBundle, providerBundle1, providerBundle2, providerBundle4);
WeavingHook wh = new ClientWeavingHook(spiFlyBundle.getBundleContext());
// Weave the TestClient class.
URL clsUrl = getClass().getResource("TestClient.class");
WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.TestClient", consumerBundle);
wh.weave(wc);
        // Invoke the woven class and check that it properly sets the TCCL so that the
// META-INF/services/org.apache.aries.mytest.MySPI file from impl2 is visible.
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {String.class});
Object result = method.invoke(cls.newInstance(), "hello");
Assert.assertEquals("All providers should be selected for this one", "ollehHELLO5", result);
// Weave the AltTestClient class.
URL cls2Url = getClass().getResource("AltTestClient.class");
WovenClass wc2 = new MyWovenClass(cls2Url, "org.apache.aries.spifly.AltTestClient", consumerBundle);
wh.weave(wc2);
// Invoke the AltTestClient
Class<?> cls2 = wc2.getDefinedClass();
Method method2 = cls2.getMethod("test", new Class [] {long.class});
Object result2 = method2.invoke(cls2.newInstance(), 4096);
Assert.assertEquals("Only the services from bundle impl4 should be selected", 8192, result2);
}
@Test
public void testClientSpecifyingTwoDifferentMethodsLimitedToDifferentProviders() {
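        // Placeholder: this scenario is not implemented yet, so the test fails unconditionally.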
Assert.fail();
}
@Test
public void testJAXPClientWantsJREImplementation1() throws Exception {
Bundle systembundle = mockSystemBundle();
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER, "javax.xml.parsers.DocumentBuilderFactory#newInstance()");
Bundle consumerBundle = mockConsumerBundle(headers, systembundle);
WeavingHook wh = new ClientWeavingHook(mockSpiFlyBundle(consumerBundle, systembundle).getBundleContext());
URL clsUrl = getClass().getResource("JaxpClient.class");
WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.JaxpClient", consumerBundle);
wh.weave(wc);
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {});
Class<?> result = (Class<?>) method.invoke(cls.newInstance());
Assert.assertEquals("JAXP implementation from JRE", "com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl", result.getName());
}
// If there is an alternate implementation it should always be favoured over the JRE one
@Test
public void testJAXPClientWantsAltImplementation1() throws Exception {
Bundle systembundle = mockSystemBundle();
Bundle providerBundle = mockProviderBundle("impl3", 1, "META-INF/services/javax.xml.parsers.DocumentBuilderFactory");
Activator.activator.registerProviderBundle("javax.xml.parsers.DocumentBuilderFactory", providerBundle);
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER, "javax.xml.parsers.DocumentBuilderFactory#newInstance()");
Bundle consumerBundle = mockConsumerBundle(headers, providerBundle, systembundle);
WeavingHook wh = new ClientWeavingHook(mockSpiFlyBundle(consumerBundle, providerBundle, systembundle).getBundleContext());
URL clsUrl = getClass().getResource("JaxpClient.class");
WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.JaxpClient", consumerBundle);
wh.weave(wc);
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {});
Class<?> result = (Class<?>) method.invoke(cls.newInstance());
Assert.assertEquals("JAXP implementation from JRE", "org.apache.aries.spifly.impl3.MyAltDocumentBuilderFactory", result.getName());
}
@Test
public void testJAXPClientWantsJREImplementation2() throws Exception {
Bundle systembundle = mockSystemBundle();
Bundle providerBundle = mockProviderBundle("impl3", 1, "META-INF/services/javax.xml.parsers.DocumentBuilderFactory");
Activator.activator.registerProviderBundle("javax.xml.parsers.DocumentBuilderFactory", providerBundle);
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER, "javax.xml.parsers.DocumentBuilderFactory#newInstance();bundleId=0");
Bundle consumerBundle = mockConsumerBundle(headers, providerBundle, systembundle);
WeavingHook wh = new ClientWeavingHook(mockSpiFlyBundle(consumerBundle, providerBundle, systembundle).getBundleContext());
URL clsUrl = getClass().getResource("JaxpClient.class");
WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.JaxpClient", consumerBundle);
wh.weave(wc);
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {});
Class<?> result = (Class<?>) method.invoke(cls.newInstance());
Assert.assertEquals("JAXP implementation from JRE", "com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl", result.getName());
}
@Test
public void testJAXPClientWantsAltImplementation2() throws Exception {
Bundle systembundle = mockSystemBundle();
Bundle providerBundle = mockProviderBundle("impl3", 1, "META-INF/services/javax.xml.parsers.DocumentBuilderFactory");
Activator.activator.registerProviderBundle("javax.xml.parsers.DocumentBuilderFactory", providerBundle);
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER, "javax.xml.parsers.DocumentBuilderFactory#newInstance();bundle=impl3");
Bundle consumerBundle = mockConsumerBundle(headers, providerBundle, systembundle);
WeavingHook wh = new ClientWeavingHook(mockSpiFlyBundle(consumerBundle, providerBundle, systembundle).getBundleContext());
URL clsUrl = getClass().getResource("JaxpClient.class");
WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.JaxpClient", consumerBundle);
wh.weave(wc);
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {});
Class<?> result = (Class<?>) method.invoke(cls.newInstance());
Assert.assertEquals("JAXP implementation from alternative bundle", "org.apache.aries.spifly.impl3.MyAltDocumentBuilderFactory", result.getName());
}
private Bundle mockSpiFlyBundle(Bundle ... bundles) throws Exception {
return mockSpiFlyBundle("spifly", new Version(1, 0, 0), bundles);
}
private Bundle mockSpiFlyBundle(String bsn, Version version, Bundle ... bundles) throws Exception {
Bundle spiFlyBundle = EasyMock.createMock(Bundle.class);
BundleContext spiFlyBundleContext = EasyMock.createMock(BundleContext.class);
EasyMock.expect(spiFlyBundleContext.getBundle()).andReturn(spiFlyBundle).anyTimes();
List<Bundle> allBundles = new ArrayList<Bundle>(Arrays.asList(bundles));
allBundles.add(spiFlyBundle);
EasyMock.expect(spiFlyBundleContext.getBundles()).andReturn(allBundles.toArray(new Bundle [] {})).anyTimes();
EasyMock.replay(spiFlyBundleContext);
EasyMock.expect(spiFlyBundle.getSymbolicName()).andReturn(bsn).anyTimes();
EasyMock.expect(spiFlyBundle.getVersion()).andReturn(version).anyTimes();
EasyMock.expect(spiFlyBundle.getBundleContext()).andReturn(spiFlyBundleContext).anyTimes();
EasyMock.replay(spiFlyBundle);
// Set the bundle context for testing purposes
Field bcField = Activator.class.getDeclaredField("bundleContext");
bcField.setAccessible(true);
bcField.set(Activator.activator, spiFlyBundle.getBundleContext());
return spiFlyBundle;
}
private Bundle mockProviderBundle(String subdir, long id, String ... resources) {
return mockProviderBundle(subdir, id, Version.emptyVersion, resources);
}
private Bundle mockProviderBundle(String subdir, long id, Version version, String ... resources) {
// Set up the classloader that will be used by the ASM-generated code as the TCCL.
// It can load a META-INF/services file
ClassLoader cl = new TestImplClassLoader(subdir, resources);
// The BundleWiring API is used on the bundle by the generated code to obtain its classloader
BundleWiring bw = EasyMock.createMock(BundleWiring.class);
EasyMock.expect(bw.getClassLoader()).andReturn(cl);
EasyMock.replay(bw);
Bundle providerBundle = EasyMock.createMock(Bundle.class);
EasyMock.expect(providerBundle.adapt(BundleWiring.class)).andReturn(bw);
EasyMock.expect(providerBundle.getSymbolicName()).andReturn(subdir).anyTimes();
EasyMock.expect(providerBundle.getBundleId()).andReturn(id).anyTimes();
EasyMock.expect(providerBundle.getVersion()).andReturn(version).anyTimes();
EasyMock.replay(providerBundle);
return providerBundle;
}
private Bundle mockConsumerBundle(Dictionary<String, String> headers, Bundle ... otherBundles) {
// Create a mock object for the client bundle which holds the code that uses ServiceLoader.load()
// or another SPI invocation.
BundleContext bc = EasyMock.createMock(BundleContext.class);
Bundle consumerBundle = EasyMock.createMock(Bundle.class);
EasyMock.expect(consumerBundle.getSymbolicName()).andReturn("testConsumer").anyTimes();
EasyMock.expect(consumerBundle.getHeaders()).andReturn(headers);
EasyMock.expect(consumerBundle.getBundleContext()).andReturn(bc);
EasyMock.expect(consumerBundle.getBundleId()).andReturn(Long.MAX_VALUE).anyTimes();
EasyMock.replay(consumerBundle);
List<Bundle> allBundles = new ArrayList<Bundle>(Arrays.asList(otherBundles));
allBundles.add(consumerBundle);
EasyMock.expect(bc.getBundles()).andReturn(allBundles.toArray(new Bundle [] {}));
EasyMock.replay(bc);
return consumerBundle;
}
private Bundle mockSystemBundle() {
Bundle systemBundle = EasyMock.createMock(Bundle.class);
EasyMock.expect(systemBundle.getBundleId()).andReturn(0L).anyTimes();
EasyMock.replay(systemBundle);
return systemBundle;
}
public static class TestImplClassLoader extends URLClassLoader {
private final List<String> resources;
private final String prefix;
public TestImplClassLoader(String subdir, String ... resources) {
super(new URL [] {}, TestImplClassLoader.class.getClassLoader());
this.prefix = TestImplClassLoader.class.getPackage().getName().replace('.', '/') + "/" + subdir + "/";
this.resources = Arrays.asList(resources);
}
@Override
public URL findResource(String name) {
if (resources.contains(name)) {
return getClass().getClassLoader().getResource(prefix + name);
} else {
return super.findResource(name);
}
}
@Override
public Enumeration<URL> findResources(String name) throws IOException {
if (resources.contains(name)) {
return getClass().getClassLoader().getResources(prefix + name);
} else {
return super.findResources(name);
}
}
}
private static class MyWovenClass implements WovenClass {
byte [] bytes;
final String className;
final Bundle bundleContainingOriginalClass;
List<String> dynamicImports = new ArrayList<String>();
boolean weavingComplete = false;
private MyWovenClass(URL clazz, String name, Bundle bundle) throws Exception {
bytes = Streams.suck(clazz.openStream());
className = name;
bundleContainingOriginalClass = bundle;
}
@Override
public byte[] getBytes() {
return bytes;
}
@Override
public void setBytes(byte[] newBytes) {
bytes = newBytes;
}
@Override
public List<String> getDynamicImports() {
return dynamicImports;
}
@Override
public boolean isWeavingComplete() {
return weavingComplete;
}
@Override
public String getClassName() {
return className;
}
@Override
public ProtectionDomain getProtectionDomain() {
return null;
}
@Override
public Class<?> getDefinedClass() {
try {
weavingComplete = true;
return new MyWovenClassClassLoader(className, getBytes(), getClass().getClassLoader(), bundleContainingOriginalClass).loadClass(className);
} catch (ClassNotFoundException e) {
e.printStackTrace();
return null;
}
}
@Override
public BundleWiring getBundleWiring() {
BundleWiring bw = EasyMock.createMock(BundleWiring.class);
EasyMock.expect(bw.getBundle()).andReturn(bundleContainingOriginalClass);
EasyMock.replay(bw);
return bw;
}
}
private static class MyWovenClassClassLoader extends ClassLoader implements BundleReference {
private final String className;
private final Bundle bundle;
private final byte [] bytes;
public MyWovenClassClassLoader(String className, byte[] bytes, ClassLoader parent, Bundle bundle) {
super(parent);
this.className = className;
this.bundle = bundle;
this.bytes = bytes;
}
@Override
protected synchronized Class<?> loadClass(String name, boolean resolve)
throws ClassNotFoundException {
if (name.equals(className)) {
return defineClass(className, bytes, 0, bytes.length);
} else {
return super.loadClass(name, resolve);
}
}
@Override
public Class<?> loadClass(String name) throws ClassNotFoundException {
return loadClass(name, false);
}
@Override
public Bundle getBundle() {
return bundle;
}
}
}
| spi-fly/contrib/pilot_using_weavinghook/SpiFlyTests/src/org/apache/aries/spifly/ClientWeavingHookTest.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.spifly;
import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.security.ProtectionDomain;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Dictionary;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.List;
import org.apache.aries.spifly.api.SpiFlyConstants;
import org.easymock.EasyMock;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.osgi.framework.BundleReference;
import org.osgi.framework.Version;
import org.osgi.framework.hooks.weaving.WeavingHook;
import org.osgi.framework.hooks.weaving.WovenClass;
import org.osgi.framework.wiring.BundleWiring;
public class ClientWeavingHookTest {
@Before
public void setUp() {
Activator.activator = new Activator();
}
@After
public void tearDown() {
Activator.activator = null;
}
@Test
    public void testClientWeavingHookBasicServiceLoaderUsage() throws Exception {
Dictionary<String, String> consumerHeaders = new Hashtable<String, String>();
consumerHeaders.put(SpiFlyConstants.SPI_CONSUMER_HEADER, "true");
// Register the bundle that provides the SPI implementation.
Bundle providerBundle = mockProviderBundle("impl1", 1, "META-INF/services/org.apache.aries.mytest.MySPI");
Activator.activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle);
Bundle consumerBundle = mockConsumerBundle(consumerHeaders, providerBundle);
Bundle spiFlyBundle = mockSpiFlyBundle("spifly", Version.parseVersion("1.9.4"), consumerBundle, providerBundle);
WeavingHook wh = new ClientWeavingHook(spiFlyBundle.getBundleContext());
// Weave the TestClient class.
URL clsUrl = getClass().getResource("TestClient.class");
Assert.assertNotNull("Precondition", clsUrl);
WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.TestClient", consumerBundle);
Assert.assertEquals("Precondition", 0, wc.getDynamicImports().size());
wh.weave(wc);
Assert.assertEquals(1, wc.getDynamicImports().size());
String di1 = "org.apache.aries.spifly;bundle-symbolic-name=spifly;bundle-version=1.9.4";
String di2 = "org.apache.aries.spifly;bundle-version=1.9.4;bundle-symbolic-name=spifly";
String di = wc.getDynamicImports().get(0);
Assert.assertTrue("Weaving should have added a dynamic import", di1.equals(di) || di2.equals(di));
        // Invoke the woven class and check that it properly sets the TCCL so that the
// META-INF/services/org.apache.aries.mytest.MySPI file from impl1 is visible.
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {String.class});
Object result = method.invoke(cls.newInstance(), "hello");
Assert.assertEquals("olleh", result);
}
@Test
public void testClientWeavingHookAltServiceLoaderLoadUnprocessed() throws Exception {
Bundle spiFlyBundle = mockSpiFlyBundle();
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER, "true");
Bundle consumerBundle = mockConsumerBundle(headers, spiFlyBundle);
WeavingHook wh = new ClientWeavingHook(spiFlyBundle.getBundleContext());
// Weave the TestClient class.
URL clsUrl = getClass().getResource("UnaffectedTestClient.class");
Assert.assertNotNull("Precondition", clsUrl);
WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.UnaffectedTestClient", consumerBundle);
Assert.assertEquals("Precondition", 0, wc.getDynamicImports().size());
wh.weave(wc);
Assert.assertEquals("The client is not affected so no additional imports should have been added",
0, wc.getDynamicImports().size());
// ok the weaving is done, now prepare the registry for the call
Bundle providerBundle = mockProviderBundle("impl1", 1, "META-INF/services/org.apache.aries.mytest.MySPI");
Activator.activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle);
        // Invoke the woven class and check that it properly sets the TCCL so that the
// META-INF/services/org.apache.aries.mytest.MySPI file from impl1 is visible.
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {String.class});
Object result = method.invoke(cls.newInstance(), "hello");
Assert.assertEquals("impl4", result);
}
@Test
public void testClientWeavingHookMultipleProviders() throws Exception {
Bundle spiFlyBundle = mockSpiFlyBundle();
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER, "true");
Bundle consumerBundle = mockConsumerBundle(headers, spiFlyBundle);
WeavingHook wh = new ClientWeavingHook(spiFlyBundle.getBundleContext());
// Weave the TestClient class.
URL clsUrl = getClass().getResource("TestClient.class");
WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.TestClient", consumerBundle);
wh.weave(wc);
Bundle providerBundle1 = mockProviderBundle("impl1", 1, "META-INF/services/org.apache.aries.mytest.MySPI");
Bundle providerBundle2 = mockProviderBundle("impl2", 2, "META-INF/services/org.apache.aries.mytest.MySPI");
// Register in reverse order to make sure the order in which bundles are sorted is correct
Activator.activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle2);
Activator.activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle1);
        // Invoke the woven class and check that it properly sets the TCCL so that the
// META-INF/services/org.apache.aries.mytest.MySPI files from impl1 and impl2 are visible.
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {String.class});
Object result = method.invoke(cls.newInstance(), "hello");
Assert.assertEquals("All three services should be invoked in the correct order", "ollehHELLO5", result);
}
@Test
public void testClientSpecifyingProvider() throws Exception {
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER, "java.util.ServiceLoader#load(java.lang.Class);bundle=impl2");
Bundle providerBundle1 = mockProviderBundle("impl1", 1, "META-INF/services/org.apache.aries.mytest.MySPI");
Bundle providerBundle2 = mockProviderBundle("impl2", 2, "META-INF/services/org.apache.aries.mytest.MySPI");
Activator.activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle1);
Activator.activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle2);
Bundle consumerBundle = mockConsumerBundle(headers, providerBundle1, providerBundle2);
Bundle spiFlyBundle = mockSpiFlyBundle(consumerBundle, providerBundle1, providerBundle2);
WeavingHook wh = new ClientWeavingHook(spiFlyBundle.getBundleContext());
// Weave the TestClient class.
URL clsUrl = getClass().getResource("TestClient.class");
WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.TestClient", consumerBundle);
wh.weave(wc);
        // Invoke the woven class and check that it properly sets the TCCL so that the
// META-INF/services/org.apache.aries.mytest.MySPI file from impl2 is visible.
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {String.class});
Object result = method.invoke(cls.newInstance(), "hello");
Assert.assertEquals("Only the services from bundle impl2 should be selected", "HELLO5", result);
}
@Test
public void testClientSpecifyingProviderVersion() throws Exception {
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER, "java.util.ServiceLoader#load(java.lang.Class);bundle=impl2;version=1.2.3");
Bundle providerBundle1 = mockProviderBundle("impl1", 1, "META-INF/services/org.apache.aries.mytest.MySPI");
Bundle providerBundle2 = mockProviderBundle("impl2", 2, "META-INF/services/org.apache.aries.mytest.MySPI");
Bundle providerBundle3 = mockProviderBundle("impl2", 3, new Version(1, 2, 3), "META-INF/services/org.apache.aries.mytest.MySPI");
Activator.activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle1);
Activator.activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle2);
Activator.activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle3);
Bundle consumerBundle = mockConsumerBundle(headers, providerBundle1, providerBundle2, providerBundle3);
Bundle spiFlyBundle = mockSpiFlyBundle(consumerBundle, providerBundle1, providerBundle2, providerBundle3);
WeavingHook wh = new ClientWeavingHook(spiFlyBundle.getBundleContext());
// Weave the TestClient class.
URL clsUrl = getClass().getResource("TestClient.class");
WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.TestClient", consumerBundle);
wh.weave(wc);
        // Invoke the woven class and check that it properly sets the TCCL so that the
// META-INF/services/org.apache.aries.mytest.MySPI file from impl2 is visible.
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {String.class});
Object result = method.invoke(cls.newInstance(), "hello");
Assert.assertEquals("Only the services from bundle impl2 should be selected", "Updated!Hello!Updated", result);
}
@Test
public void testClientSpecificProviderLoadArgument() throws Exception {
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER,
"java.util.ServiceLoader#load(java.lang.Class[org.apache.aries.mytest.MySPI])," +
"java.util.ServiceLoader#load(java.lang.Class[org.apache.aries.mytest.AltSPI]);bundle=impl4");
Bundle providerBundle1 = mockProviderBundle("impl1", 1, "META-INF/services/org.apache.aries.mytest.MySPI");
Bundle providerBundle2 = mockProviderBundle("impl2", 2, "META-INF/services/org.apache.aries.mytest.MySPI", "META-INF/services/org.apache.aries.mytest.AltSPI");
Bundle providerBundle4 = mockProviderBundle("impl4", 4, "META-INF/services/org.apache.aries.mytest.MySPI", "META-INF/services/org.apache.aries.mytest.AltSPI");
Activator.activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle1);
Activator.activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle2);
Activator.activator.registerProviderBundle("org.apache.aries.mytest.AltSPI", providerBundle2);
Activator.activator.registerProviderBundle("org.apache.aries.mytest.MySPI", providerBundle4);
Activator.activator.registerProviderBundle("org.apache.aries.mytest.AltSPI", providerBundle4);
Bundle consumerBundle = mockConsumerBundle(headers, providerBundle1, providerBundle2, providerBundle4);
Bundle spiFlyBundle = mockSpiFlyBundle(consumerBundle, providerBundle1, providerBundle2, providerBundle4);
WeavingHook wh = new ClientWeavingHook(spiFlyBundle.getBundleContext());
// Weave the TestClient class.
URL clsUrl = getClass().getResource("TestClient.class");
WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.TestClient", consumerBundle);
wh.weave(wc);
        // Invoke the woven class and check that it properly sets the TCCL so that the
// META-INF/services/org.apache.aries.mytest.MySPI file from impl2 is visible.
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {String.class});
Object result = method.invoke(cls.newInstance(), "hello");
Assert.assertEquals("Only the services from bundle impl2 should be selected", "ollehHELLO5", result);
// Weave the AltTestClient class.
URL cls2Url = getClass().getResource("AltTestClient.class");
WovenClass wc2 = new MyWovenClass(cls2Url, "org.apache.aries.spifly.AltTestClient", consumerBundle);
wh.weave(wc2);
// Invoke the AltTestClient
Class<?> cls2 = wc2.getDefinedClass();
Method method2 = cls2.getMethod("test", new Class [] {long.class});
Object result2 = method2.invoke(cls2.newInstance(), 4096);
Assert.assertEquals("Only the services from bundle impl4 should be selected", 8192, result2);
}
@Test
public void testClientSpecifyingTwoDifferentMethodsLimitedToDifferentProviders() {
Assert.fail();
}
@Test
public void testJAXPClientWantsJREImplementation1() throws Exception {
Bundle systembundle = mockSystemBundle();
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER, "javax.xml.parsers.DocumentBuilderFactory#newInstance()");
Bundle consumerBundle = mockConsumerBundle(headers, systembundle);
WeavingHook wh = new ClientWeavingHook(mockSpiFlyBundle(consumerBundle, systembundle).getBundleContext());
URL clsUrl = getClass().getResource("JaxpClient.class");
WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.JaxpClient", consumerBundle);
wh.weave(wc);
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {});
Class<?> result = (Class<?>) method.invoke(cls.newInstance());
Assert.assertEquals("JAXP implementation from JRE", "com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl", result.getName());
}
// If there is an alternate implementation it should always be favoured over the JRE one
@Test
public void testJAXPClientWantsAltImplementation1() throws Exception {
Bundle systembundle = mockSystemBundle();
Bundle providerBundle = mockProviderBundle("impl3", 1, "META-INF/services/javax.xml.parsers.DocumentBuilderFactory");
Activator.activator.registerProviderBundle("javax.xml.parsers.DocumentBuilderFactory", providerBundle);
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER, "javax.xml.parsers.DocumentBuilderFactory#newInstance()");
Bundle consumerBundle = mockConsumerBundle(headers, providerBundle, systembundle);
WeavingHook wh = new ClientWeavingHook(mockSpiFlyBundle(consumerBundle, providerBundle, systembundle).getBundleContext());
URL clsUrl = getClass().getResource("JaxpClient.class");
WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.JaxpClient", consumerBundle);
wh.weave(wc);
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {});
Class<?> result = (Class<?>) method.invoke(cls.newInstance());
Assert.assertEquals("JAXP implementation from JRE", "org.apache.aries.spifly.impl3.MyAltDocumentBuilderFactory", result.getName());
}
@Test
public void testJAXPClientWantsJREImplementation2() throws Exception {
Bundle systembundle = mockSystemBundle();
Bundle providerBundle = mockProviderBundle("impl3", 1, "META-INF/services/javax.xml.parsers.DocumentBuilderFactory");
Activator.activator.registerProviderBundle("javax.xml.parsers.DocumentBuilderFactory", providerBundle);
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER, "javax.xml.parsers.DocumentBuilderFactory#newInstance();bundleId=0");
Bundle consumerBundle = mockConsumerBundle(headers, providerBundle, systembundle);
WeavingHook wh = new ClientWeavingHook(mockSpiFlyBundle(consumerBundle, providerBundle, systembundle).getBundleContext());
URL clsUrl = getClass().getResource("JaxpClient.class");
WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.JaxpClient", consumerBundle);
wh.weave(wc);
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {});
Class<?> result = (Class<?>) method.invoke(cls.newInstance());
Assert.assertEquals("JAXP implementation from JRE", "com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl", result.getName());
}
@Test
public void testJAXPClientWantsAltImplementation2() throws Exception {
Bundle systembundle = mockSystemBundle();
Bundle providerBundle = mockProviderBundle("impl3", 1, "META-INF/services/javax.xml.parsers.DocumentBuilderFactory");
Activator.activator.registerProviderBundle("javax.xml.parsers.DocumentBuilderFactory", providerBundle);
Dictionary<String, String> headers = new Hashtable<String, String>();
headers.put(SpiFlyConstants.SPI_CONSUMER_HEADER, "javax.xml.parsers.DocumentBuilderFactory#newInstance();bundle=impl3");
Bundle consumerBundle = mockConsumerBundle(headers, providerBundle, systembundle);
WeavingHook wh = new ClientWeavingHook(mockSpiFlyBundle(consumerBundle, providerBundle, systembundle).getBundleContext());
URL clsUrl = getClass().getResource("JaxpClient.class");
WovenClass wc = new MyWovenClass(clsUrl, "org.apache.aries.spifly.JaxpClient", consumerBundle);
wh.weave(wc);
Class<?> cls = wc.getDefinedClass();
Method method = cls.getMethod("test", new Class [] {});
Class<?> result = (Class<?>) method.invoke(cls.newInstance());
Assert.assertEquals("JAXP implementation from alternative bundle", "org.apache.aries.spifly.impl3.MyAltDocumentBuilderFactory", result.getName());
}
private Bundle mockSpiFlyBundle(Bundle ... bundles) throws Exception {
return mockSpiFlyBundle("spifly", new Version(1, 0, 0), bundles);
}
private Bundle mockSpiFlyBundle(String bsn, Version version, Bundle ... bundles) throws Exception {
Bundle spiFlyBundle = EasyMock.createMock(Bundle.class);
BundleContext spiFlyBundleContext = EasyMock.createMock(BundleContext.class);
EasyMock.expect(spiFlyBundleContext.getBundle()).andReturn(spiFlyBundle).anyTimes();
List<Bundle> allBundles = new ArrayList<Bundle>(Arrays.asList(bundles));
allBundles.add(spiFlyBundle);
EasyMock.expect(spiFlyBundleContext.getBundles()).andReturn(allBundles.toArray(new Bundle [] {})).anyTimes();
EasyMock.replay(spiFlyBundleContext);
EasyMock.expect(spiFlyBundle.getSymbolicName()).andReturn(bsn).anyTimes();
EasyMock.expect(spiFlyBundle.getVersion()).andReturn(version).anyTimes();
EasyMock.expect(spiFlyBundle.getBundleContext()).andReturn(spiFlyBundleContext).anyTimes();
EasyMock.replay(spiFlyBundle);
// Set the bundle context for testing purposes
Field bcField = Activator.class.getDeclaredField("bundleContext");
bcField.setAccessible(true);
bcField.set(Activator.activator, spiFlyBundle.getBundleContext());
return spiFlyBundle;
}
private Bundle mockProviderBundle(String subdir, long id, String ... resources) {
return mockProviderBundle(subdir, id, Version.emptyVersion, resources);
}
private Bundle mockProviderBundle(String subdir, long id, Version version, String ... resources) {
// Set up the classloader that will be used by the ASM-generated code as the TCCL.
// It can load a META-INF/services file
ClassLoader cl = new TestImplClassLoader(subdir, resources);
// The BundleWiring API is used on the bundle by the generated code to obtain its classloader
BundleWiring bw = EasyMock.createMock(BundleWiring.class);
EasyMock.expect(bw.getClassLoader()).andReturn(cl);
EasyMock.replay(bw);
Bundle providerBundle = EasyMock.createMock(Bundle.class);
EasyMock.expect(providerBundle.adapt(BundleWiring.class)).andReturn(bw);
EasyMock.expect(providerBundle.getSymbolicName()).andReturn(subdir).anyTimes();
EasyMock.expect(providerBundle.getBundleId()).andReturn(id).anyTimes();
EasyMock.expect(providerBundle.getVersion()).andReturn(version).anyTimes();
EasyMock.replay(providerBundle);
return providerBundle;
}
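    // Hedged sketch of the mechanism this mock supports, following the comments above:
    // the woven client code obtains the provider bundle's class loader through the
    // BundleWiring API and installs it as the thread context class loader, so that
    // ServiceLoader can see the provider's META-INF/services/<SPI> resource. Roughly:
    //
    //   ClassLoader providerLoader = providerBundle.adapt(BundleWiring.class).getClassLoader();
    //   Thread.currentThread().setContextClassLoader(providerLoader);
    //   // ServiceLoader.load(org.apache.aries.mytest.MySPI.class) now finds the provider's services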
private Bundle mockConsumerBundle(Dictionary<String, String> headers, Bundle ... otherBundles) {
// Create a mock object for the client bundle which holds the code that uses ServiceLoader.load()
// or another SPI invocation.
BundleContext bc = EasyMock.createMock(BundleContext.class);
Bundle consumerBundle = EasyMock.createMock(Bundle.class);
EasyMock.expect(consumerBundle.getSymbolicName()).andReturn("testConsumer").anyTimes();
EasyMock.expect(consumerBundle.getHeaders()).andReturn(headers);
EasyMock.expect(consumerBundle.getBundleContext()).andReturn(bc);
EasyMock.expect(consumerBundle.getBundleId()).andReturn(Long.MAX_VALUE).anyTimes();
EasyMock.replay(consumerBundle);
List<Bundle> allBundles = new ArrayList<Bundle>(Arrays.asList(otherBundles));
allBundles.add(consumerBundle);
EasyMock.expect(bc.getBundles()).andReturn(allBundles.toArray(new Bundle [] {}));
EasyMock.replay(bc);
return consumerBundle;
}
private Bundle mockSystemBundle() {
Bundle systemBundle = EasyMock.createMock(Bundle.class);
EasyMock.expect(systemBundle.getBundleId()).andReturn(0L).anyTimes();
EasyMock.replay(systemBundle);
return systemBundle;
}
public static class TestImplClassLoader extends URLClassLoader {
private final List<String> resources;
private final String prefix;
public TestImplClassLoader(String subdir, String ... resources) {
super(new URL [] {}, TestImplClassLoader.class.getClassLoader());
this.prefix = TestImplClassLoader.class.getPackage().getName().replace('.', '/') + "/" + subdir + "/";
this.resources = Arrays.asList(resources);
}
@Override
public URL findResource(String name) {
if (resources.contains(name)) {
return getClass().getClassLoader().getResource(prefix + name);
} else {
return super.findResource(name);
}
}
@Override
public Enumeration<URL> findResources(String name) throws IOException {
if (resources.contains(name)) {
return getClass().getClassLoader().getResources(prefix + name);
} else {
return super.findResources(name);
}
}
}
private static class MyWovenClass implements WovenClass {
byte [] bytes;
final String className;
final Bundle bundleContainingOriginalClass;
List<String> dynamicImports = new ArrayList<String>();
boolean weavingComplete = false;
private MyWovenClass(URL clazz, String name, Bundle bundle) throws Exception {
bytes = Streams.suck(clazz.openStream());
className = name;
bundleContainingOriginalClass = bundle;
}
@Override
public byte[] getBytes() {
return bytes;
}
@Override
public void setBytes(byte[] newBytes) {
bytes = newBytes;
}
@Override
public List<String> getDynamicImports() {
return dynamicImports;
}
@Override
public boolean isWeavingComplete() {
return weavingComplete;
}
@Override
public String getClassName() {
return className;
}
@Override
public ProtectionDomain getProtectionDomain() {
return null;
}
@Override
public Class<?> getDefinedClass() {
try {
weavingComplete = true;
return new MyWovenClassClassLoader(className, getBytes(), getClass().getClassLoader(), bundleContainingOriginalClass).loadClass(className);
} catch (ClassNotFoundException e) {
e.printStackTrace();
return null;
}
}
@Override
public BundleWiring getBundleWiring() {
BundleWiring bw = EasyMock.createMock(BundleWiring.class);
EasyMock.expect(bw.getBundle()).andReturn(bundleContainingOriginalClass);
EasyMock.replay(bw);
return bw;
}
}
private static class MyWovenClassClassLoader extends ClassLoader implements BundleReference {
private final String className;
private final Bundle bundle;
private final byte [] bytes;
public MyWovenClassClassLoader(String className, byte[] bytes, ClassLoader parent, Bundle bundle) {
super(parent);
this.className = className;
this.bundle = bundle;
this.bytes = bytes;
}
@Override
protected synchronized Class<?> loadClass(String name, boolean resolve)
throws ClassNotFoundException {
if (name.equals(className)) {
return defineClass(className, bytes, 0, bytes.length);
} else {
return super.loadClass(name, resolve);
}
}
@Override
public Class<?> loadClass(String name) throws ClassNotFoundException {
return loadClass(name, false);
}
@Override
public Bundle getBundle() {
return bundle;
}
}
}
| Small test fix.
git-svn-id: 212869a37fe990abe2323f86150f3c4d5a6279c2@1052209 13f79535-47bb-0310-9956-ffa450edef68
| spi-fly/contrib/pilot_using_weavinghook/SpiFlyTests/src/org/apache/aries/spifly/ClientWeavingHookTest.java | Small test fix. | <ide><path>pi-fly/contrib/pilot_using_weavinghook/SpiFlyTests/src/org/apache/aries/spifly/ClientWeavingHookTest.java
<ide> Class<?> cls = wc.getDefinedClass();
<ide> Method method = cls.getMethod("test", new Class [] {String.class});
<ide> Object result = method.invoke(cls.newInstance(), "hello");
<del> Assert.assertEquals("Only the services from bundle impl2 should be selected", "ollehHELLO5", result);
<add> Assert.assertEquals("All providers should be selected for this one", "ollehHELLO5", result);
<ide>
<ide> // Weave the AltTestClient class.
<ide> URL cls2Url = getClass().getResource("AltTestClient.class"); |
|
Java | apache-2.0 | 16b902a6ade0045e0711d269408786626d0fd9bd | 0 | motown-io/motown,pqtoan/motown | /**
* Copyright (C) 2013 Motown.IO ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.motown.ocpp.v15.soap.centralsystem;
import io.motown.domain.api.chargingstation.Measurand;
import javax.annotation.Nullable;
/**
* Adapter which translates a {@code io.motown.ocpp.v15.soap.centralsystem.schema.Measurand} to a {@code Measurand}.
*/
class MeasurandTranslator implements Translator<Measurand> {
private final io.motown.ocpp.v15.soap.centralsystem.schema.Measurand measurand;
/**
     * Creates a {@code MeasurandTranslator}.
*
* @param measurand the measurand to translate.
*/
public MeasurandTranslator(@Nullable io.motown.ocpp.v15.soap.centralsystem.schema.Measurand measurand) {
this.measurand = measurand;
}
/**
* {@inheritDoc}
*/
@Override
public Measurand translate() {
if (this.measurand == null) {
// In OCPP 1.5, IMPORTED_ACTIVE_ENERGY_REGISTER is the default value.
return Measurand.IMPORTED_ACTIVE_ENERGY_REGISTER;
}
Measurand result;
switch (this.measurand) {
case ENERGY_ACTIVE_EXPORT_REGISTER:
result = Measurand.EXPORTED_ACTIVE_ENERGY_REGISTER;
break;
case ENERGY_ACTIVE_IMPORT_REGISTER:
result = Measurand.IMPORTED_ACTIVE_ENERGY_REGISTER;
break;
case ENERGY_REACTIVE_EXPORT_REGISTER:
result = Measurand.EXPORTED_REACTIVE_ENERGY_REGISTER;
break;
case ENERGY_REACTIVE_IMPORT_REGISTER:
result = Measurand.IMPORTED_REACTIVE_ENERGY_REGISTER;
break;
case ENERGY_ACTIVE_EXPORT_INTERVAL:
result = Measurand.EXPORTED_ACTIVE_ENERGY_INTERVAL;
break;
case ENERGY_ACTIVE_IMPORT_INTERVAL:
result = Measurand.IMPORTED_ACTIVE_ENERGY_INTERVAL;
break;
case ENERGY_REACTIVE_EXPORT_INTERVAL:
result = Measurand.EXPORTED_REACTIVE_ENERGY_INTERVAL;
break;
case ENERGY_REACTIVE_IMPORT_INTERVAL:
result = Measurand.IMPORTED_REACTIVE_ENERGY_INTERVAL;
break;
case POWER_ACTIVE_EXPORT:
result = Measurand.EXPORTED_ACTIVE_POWER;
break;
case POWER_ACTIVE_IMPORT:
result = Measurand.IMPORTED_ACTIVE_POWER;
break;
case POWER_REACTIVE_EXPORT:
result = Measurand.EXPORTED_REACTIVE_POWER;
break;
case POWER_REACTIVE_IMPORT:
                result = Measurand.IMPORTED_REACTIVE_POWER;
break;
case CURRENT_EXPORT:
result = Measurand.EXPORTED_CURRENT;
break;
case CURRENT_IMPORT:
result = Measurand.IMPORTED_CURRENT;
break;
case VOLTAGE:
result = Measurand.VOLTAGE;
break;
case TEMPERATURE:
result = Measurand.TEMPERATURE;
break;
default:
throw new AssertionError(String.format("Unknown value for Measurand: '%s'", measurand));
}
return result;
}
}
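// Illustrative usage sketch (hedged; relies only on the constructor and translate()
// shown above, and on the schema enum constants referenced in the switch):
//
//   Measurand dflt = new MeasurandTranslator(null).translate();
//   // dflt == Measurand.IMPORTED_ACTIVE_ENERGY_REGISTER, the OCPP 1.5 default
//
//   Measurand volts = new MeasurandTranslator(
//           io.motown.ocpp.v15.soap.centralsystem.schema.Measurand.VOLTAGE).translate();
//   // volts == Measurand.VOLTAGE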
| ocpp/v15-soap/src/main/java/io/motown/ocpp/v15/soap/centralsystem/MeasurandTranslator.java | /**
* Copyright (C) 2013 Motown.IO ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.motown.ocpp.v15.soap.centralsystem;
import io.motown.domain.api.chargingstation.Measurand;
import javax.annotation.Nullable;
/**
* Adapter which translates a {@code io.motown.ocpp.v15.soap.centralsystem.schema.Measurand} to a {@code Measurand}.
*/
class MeasurandTranslator implements Translator<Measurand> {
private final io.motown.ocpp.v15.soap.centralsystem.schema.Measurand measurand;
/**
     * Creates a {@code MeasurandTranslator}.
*
* @param measurand the measurand to translate.
*/
public MeasurandTranslator(@Nullable io.motown.ocpp.v15.soap.centralsystem.schema.Measurand measurand) {
this.measurand = measurand;
}
/**
* {@inheritDoc}
*/
@Override
public Measurand translate() {
if (this.measurand == null) {
// In OCPP 1.5, IMPORTED_ACTIVE_ENERGY_REGISTER is the default value.
return Measurand.IMPORTED_ACTIVE_ENERGY_REGISTER;
}
switch (this.measurand) {
case ENERGY_ACTIVE_EXPORT_REGISTER:
return Measurand.EXPORTED_ACTIVE_ENERGY_REGISTER;
case ENERGY_ACTIVE_IMPORT_REGISTER:
return Measurand.IMPORTED_ACTIVE_ENERGY_REGISTER;
case ENERGY_REACTIVE_EXPORT_REGISTER:
return Measurand.EXPORTED_REACTIVE_ENERGY_REGISTER;
case ENERGY_REACTIVE_IMPORT_REGISTER:
return Measurand.IMPORTED_REACTIVE_ENERGY_REGISTER;
case ENERGY_ACTIVE_EXPORT_INTERVAL:
return Measurand.EXPORTED_ACTIVE_ENERGY_INTERVAL;
case ENERGY_ACTIVE_IMPORT_INTERVAL:
return Measurand.IMPORTED_ACTIVE_ENERGY_INTERVAL;
case ENERGY_REACTIVE_EXPORT_INTERVAL:
return Measurand.EXPORTED_REACTIVE_ENERGY_INTERVAL;
case ENERGY_REACTIVE_IMPORT_INTERVAL:
return Measurand.IMPORTED_REACTIVE_ENERGY_INTERVAL;
case POWER_ACTIVE_EXPORT:
return Measurand.EXPORTED_ACTIVE_POWER;
case POWER_ACTIVE_IMPORT:
return Measurand.IMPORTED_ACTIVE_POWER;
case POWER_REACTIVE_EXPORT:
return Measurand.EXPORTED_REACTIVE_POWER;
case POWER_REACTIVE_IMPORT:
return Measurand.IMPORTED_REACTIVE_POWER;
case CURRENT_EXPORT:
return Measurand.EXPORTED_CURRENT;
case CURRENT_IMPORT:
return Measurand.IMPORTED_CURRENT;
case VOLTAGE:
return Measurand.VOLTAGE;
case TEMPERATURE:
return Measurand.TEMPERATURE;
default:
throw new AssertionError(String.format("Unknown value for Measurand: '%s'", measurand));
}
}
}
| Reduce cyclomatic complexity
| ocpp/v15-soap/src/main/java/io/motown/ocpp/v15/soap/centralsystem/MeasurandTranslator.java | Reduce cyclomatic complexity | <ide><path>cpp/v15-soap/src/main/java/io/motown/ocpp/v15/soap/centralsystem/MeasurandTranslator.java
<ide> return Measurand.IMPORTED_ACTIVE_ENERGY_REGISTER;
<ide> }
<ide>
<add> Measurand result;
<add>
<ide> switch (this.measurand) {
<ide> case ENERGY_ACTIVE_EXPORT_REGISTER:
<del> return Measurand.EXPORTED_ACTIVE_ENERGY_REGISTER;
<add> result = Measurand.EXPORTED_ACTIVE_ENERGY_REGISTER;
<add> break;
<ide> case ENERGY_ACTIVE_IMPORT_REGISTER:
<del> return Measurand.IMPORTED_ACTIVE_ENERGY_REGISTER;
<add> result = Measurand.IMPORTED_ACTIVE_ENERGY_REGISTER;
<add> break;
<ide> case ENERGY_REACTIVE_EXPORT_REGISTER:
<del> return Measurand.EXPORTED_REACTIVE_ENERGY_REGISTER;
<add> result = Measurand.EXPORTED_REACTIVE_ENERGY_REGISTER;
<add> break;
<ide> case ENERGY_REACTIVE_IMPORT_REGISTER:
<del> return Measurand.IMPORTED_REACTIVE_ENERGY_REGISTER;
<add> result = Measurand.IMPORTED_REACTIVE_ENERGY_REGISTER;
<add> break;
<ide> case ENERGY_ACTIVE_EXPORT_INTERVAL:
<del> return Measurand.EXPORTED_ACTIVE_ENERGY_INTERVAL;
<add> result = Measurand.EXPORTED_ACTIVE_ENERGY_INTERVAL;
<add> break;
<ide> case ENERGY_ACTIVE_IMPORT_INTERVAL:
<del> return Measurand.IMPORTED_ACTIVE_ENERGY_INTERVAL;
<add> result = Measurand.IMPORTED_ACTIVE_ENERGY_INTERVAL;
<add> break;
<ide> case ENERGY_REACTIVE_EXPORT_INTERVAL:
<del> return Measurand.EXPORTED_REACTIVE_ENERGY_INTERVAL;
<add> result = Measurand.EXPORTED_REACTIVE_ENERGY_INTERVAL;
<add> break;
<ide> case ENERGY_REACTIVE_IMPORT_INTERVAL:
<del> return Measurand.IMPORTED_REACTIVE_ENERGY_INTERVAL;
<add> result = Measurand.IMPORTED_REACTIVE_ENERGY_INTERVAL;
<add> break;
<ide> case POWER_ACTIVE_EXPORT:
<del> return Measurand.EXPORTED_ACTIVE_POWER;
<add> result = Measurand.EXPORTED_ACTIVE_POWER;
<add> break;
<ide> case POWER_ACTIVE_IMPORT:
<del> return Measurand.IMPORTED_ACTIVE_POWER;
<add> result = Measurand.IMPORTED_ACTIVE_POWER;
<add> break;
<ide> case POWER_REACTIVE_EXPORT:
<del> return Measurand.EXPORTED_REACTIVE_POWER;
<add> result = Measurand.EXPORTED_REACTIVE_POWER;
<add> break;
<ide> case POWER_REACTIVE_IMPORT:
<del> return Measurand.IMPORTED_REACTIVE_POWER;
<add>                result = Measurand.IMPORTED_REACTIVE_POWER;
<add> break;
<ide> case CURRENT_EXPORT:
<del> return Measurand.EXPORTED_CURRENT;
<add> result = Measurand.EXPORTED_CURRENT;
<add> break;
<ide> case CURRENT_IMPORT:
<del> return Measurand.IMPORTED_CURRENT;
<add> result = Measurand.IMPORTED_CURRENT;
<add> break;
<ide> case VOLTAGE:
<del> return Measurand.VOLTAGE;
<add> result = Measurand.VOLTAGE;
<add> break;
<ide> case TEMPERATURE:
<del> return Measurand.TEMPERATURE;
<add> result = Measurand.TEMPERATURE;
<add> break;
<ide> default:
<ide> throw new AssertionError(String.format("Unknown value for Measurand: '%s'", measurand));
<ide> }
<add>
<add> return result;
<ide> }
<ide>
<ide> } |
|
Java | mit | 7e04e060e23b28aa14a21c74d3b3a047dd339c3c | 0 | tkob/yokohamaunit,tkob/yokohamaunit | package yokohama.unit.translator;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.bcel.Constants;
import org.apache.bcel.generic.AnnotationEntryGen;
import org.apache.bcel.generic.ArrayType;
import org.apache.bcel.generic.BranchInstruction;
import org.apache.bcel.generic.ClassGen;
import org.apache.bcel.generic.ConstantPoolGen;
import org.apache.bcel.generic.InstructionConstants;
import org.apache.bcel.generic.InstructionFactory;
import org.apache.bcel.generic.InstructionHandle;
import org.apache.bcel.generic.InstructionList;
import org.apache.bcel.generic.LocalVariableGen;
import org.apache.bcel.generic.MethodGen;
import org.apache.bcel.generic.ObjectType;
import org.apache.bcel.generic.PUSH;
import org.apache.bcel.generic.Type;
import org.apache.commons.collections4.ListUtils;
import yokohama.unit.ast.Kind;
import yokohama.unit.ast_junit.CatchClause;
import yokohama.unit.ast_junit.CompilationUnit;
import yokohama.unit.ast_junit.InvokeExpr;
import yokohama.unit.ast_junit.IsNotStatement;
import yokohama.unit.ast_junit.IsStatement;
import yokohama.unit.ast_junit.Statement;
import yokohama.unit.ast_junit.TestMethod;
import yokohama.unit.ast_junit.Var;
import yokohama.unit.ast_junit.VarDeclVisitor;
import yokohama.unit.ast_junit.VarInitStatement;
import yokohama.unit.util.Pair;
public class BcelJUnitAstCompiler implements JUnitAstCompiler {
public static class CaughtExceptionVarVisitor {
public static List<Pair<yokohama.unit.ast_junit.Type, String>> sortedSet(Stream<Pair<yokohama.unit.ast_junit.Type, String>> i) {
return i.collect(Collectors.toSet())
.stream()
.sorted((o1, o2) -> o1.getSecond().compareTo(o2.getSecond()))
.collect(Collectors.toList());
}
public Stream<Pair<yokohama.unit.ast_junit.Type, String>> visitTestMethod(TestMethod testMethod) {
return visitStatements(testMethod.getStatements());
}
public Stream<Pair<yokohama.unit.ast_junit.Type, String>> visitStatements(List<Statement> statements) {
return statements.stream().flatMap(this::visitStatement);
}
public Stream<Pair<yokohama.unit.ast_junit.Type, String>> visitStatement(Statement statement) {
return statement.accept(
isStatement -> Stream.<Pair<yokohama.unit.ast_junit.Type, String>>empty(),
isNotStatement -> Stream.<Pair<yokohama.unit.ast_junit.Type, String>>empty(),
varInitStatement -> Stream.<Pair<yokohama.unit.ast_junit.Type, String>>empty(),
returnIsStatement -> Stream.<Pair<yokohama.unit.ast_junit.Type, String>>empty(),
returnIsNotStatement -> Stream.<Pair<yokohama.unit.ast_junit.Type, String>>empty(),
invokeVoidStatement -> Stream.<Pair<yokohama.unit.ast_junit.Type, String>>empty(),
tryStatement ->
Stream.concat(
visitStatements(tryStatement.getTryStatements()),
Stream.concat(
tryStatement.getCatchClauses().stream().flatMap(this::visitCatchClause),
visitStatements(tryStatement.getFinallyStatements()))),
ifStatement ->
Stream.concat(
visitStatements(ifStatement.getThen()),
visitStatements(ifStatement.getOtherwise())));
}
public Stream<Pair<yokohama.unit.ast_junit.Type, String>> visitCatchClause(CatchClause catchClause) {
return Stream.concat(
Stream.of(
new Pair<>(
catchClause.getClassType().toType(),
catchClause.getVar().getName())),
visitStatements(catchClause.getStatements()));
}
}
@Override
public boolean compile(
Path docyPath,
CompilationUnit ast,
String className,
String packageName,
List<String> classPath,
Optional<Path> dest,
List<String> javacArgs) {
ClassGen cg = new ClassGen(
packageName.equals("") ? className : packageName + "." + className,
"java.lang.Object", // super class
docyPath.getFileName().toString(), // source file name
Constants.ACC_PUBLIC | Constants.ACC_SUPER,
null // implemented interfaces
);
        // set class file version to major 49 (the Java 1.5 class file format)
cg.setMajor(49);
cg.setMinor(0);
ConstantPoolGen cp = cg.getConstantPool(); // cg creates constant pool
cg.addEmptyConstructor(Constants.ACC_PUBLIC);
for (TestMethod testMethod : ast.getClassDecl().getTestMethods()) {
visitTestMethod(testMethod, cg, cp);
}
try {
Path classFilePath = makeClassFilePath(dest, packageName, className);
cg.getJavaClass().dump(classFilePath.toFile());
} catch(java.io.IOException e) {
System.err.println(e);
}
return true;
}
public Path makeClassFilePath(Optional<Path> dest, String packageName, String className) {
Path classFile = (dest.isPresent() ? dest.get(): Paths.get("."))
.resolve(Paths.get(packageName.replace('.', '/')))
.resolve(className + ".class");
return classFile;
}
private void visitTestMethod(TestMethod testMethod, ClassGen cg, ConstantPoolGen cp) {
InstructionList il = new InstructionList();
MethodGen mg = new MethodGen(Constants.ACC_PUBLIC, // access flags
Type.VOID, // return type of a test method is always void
Type.NO_ARGS, new String[]{}, // test methods have no arguments
testMethod.getName(),
cg.getClassName(),
il, cp);
AnnotationEntryGen ag = new AnnotationEntryGen(
new ObjectType("org.junit.Test"),
Arrays.asList(),
true,
cp);
mg.addAnnotationEntry(ag);
InstructionFactory factory = new InstructionFactory(cg);
Map<String, LocalVariableGen> locals = new HashMap<>();
List<Pair<yokohama.unit.ast_junit.Type, String>> varDecls =
VarDeclVisitor.sortedSet(new VarDeclVisitor().visitTestMethod(testMethod));
List<Pair<yokohama.unit.ast_junit.Type, String>> caughtExVars =
CaughtExceptionVarVisitor.sortedSet(new CaughtExceptionVarVisitor().visitTestMethod(testMethod));
for (Pair<yokohama.unit.ast_junit.Type,String> pair :
ListUtils.union(varDecls, caughtExVars)) {
yokohama.unit.ast_junit.Type type = pair.getFirst();
String name = pair.getSecond();
if (locals.containsKey(name))
throw new RuntimeException("duplicate local variable: " + name);
LocalVariableGen lv = mg.addLocalVariable(name, typeOf(type), null, null);
locals.put(name, lv);
}
for (Statement statement : testMethod.getStatements()) {
visitStatement(statement, locals, mg, il, factory, cp);
}
il.append(InstructionConstants.RETURN);
mg.setMaxStack();
cg.addMethod(mg.getMethod());
il.dispose();
}
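    // Roughly, the bytecode assembled above corresponds to a plain JUnit 4 test method of
    // this shape (a sketch only; the actual body is whatever visitStatement emits):
    //
    //   @org.junit.Test
    //   public void nameOfTestMethod() {
    //       // locals declared up front, then the translated statements
    //       return;
    //   }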
private void visitStatement(
Statement statement,
Map<String, LocalVariableGen> locals,
MethodGen mg,
InstructionList il,
InstructionFactory factory,
ConstantPoolGen cp) {
statement.<Void>accept(
isStatement -> {
visitIsStatement(isStatement, locals, il, factory, cp);
return null;
},
isNotStatement -> {
visitIsNotStatement(isNotStatement, locals, il, factory, cp);
return null;
},
varInitStatement -> {
visitVarInitStatement(varInitStatement, locals, il, factory, cp);
return null;
},
returnIsStatement -> { return null; },
returnIsNotStatement -> { return null; },
invokeVoidStatement -> { return null; },
tryStatement -> {
InstructionHandle startTry = il.append(InstructionFactory.NOP);
for (Statement s : tryStatement.getTryStatements()) {
visitStatement(s, locals, mg, il, factory, cp);
}
InstructionHandle endTry = il.append(InstructionFactory.NOP);
BranchInstruction goto_ = InstructionFactory.createBranchInstruction(Constants.GOTO, null);
il.append(goto_);
List<BranchInstruction> catchExits = new ArrayList<>();
for (CatchClause catchClause : tryStatement.getCatchClauses()) {
LocalVariableGen ex = locals.get(catchClause.getVar().getName());
InstructionHandle startCatch = il.append(
InstructionFactory.createStore(ex.getType(), ex.getIndex()));
mg.addExceptionHandler(
startTry,
endTry,
startCatch,
(ObjectType)typeOf(catchClause.getClassType().toType()));
for (Statement s : catchClause.getStatements()) {
this.visitStatement(s, locals, mg, il, factory, cp);
}
BranchInstruction exitCatch = InstructionFactory.createBranchInstruction(Constants.GOTO, null);
il.append(exitCatch);
catchExits.add(exitCatch);
}
InstructionHandle startFinally = il.append(InstructionFactory.NOP);
for (Statement s : tryStatement.getFinallyStatements()) {
visitStatement(s, locals, mg, il, factory, cp);
}
goto_.setTarget(startFinally);
for (BranchInstruction bi : catchExits) {
bi.setTarget(startFinally);
}
return null;
},
ifStatement -> {
LocalVariableGen lv = locals.get(ifStatement.getCond().getName());
il.append(InstructionFactory.createLoad(lv.getType(), lv.getIndex()));
// if
BranchInstruction ifeq = InstructionFactory.createBranchInstruction(Constants.IFEQ, null);
il.append(ifeq);
// then
for (Statement thenStatement : ifStatement.getThen()) {
visitStatement(thenStatement, locals, mg, il, factory, cp);
}
BranchInstruction goto_ = InstructionFactory.createBranchInstruction(Constants.GOTO, null);
il.append(goto_);
// else
InstructionHandle else_ = il.append(InstructionFactory.NOP);
for (Statement elseStatement : ifStatement.getOtherwise()) {
visitStatement(elseStatement, locals, mg, il, factory, cp);
}
InstructionHandle fi = il.append(InstructionFactory.NOP);
// tie the knot
ifeq.setTarget(else_);
goto_.setTarget(fi);
return null;
}
);
}
private void visitIsStatement(
IsStatement isStatement,
Map<String, LocalVariableGen> locals,
InstructionList il,
InstructionFactory factory,
ConstantPoolGen cp) {
LocalVariableGen subject = locals.get(isStatement.getSubject().getName());
LocalVariableGen complement = locals.get(isStatement.getComplement().getName());
il.append(InstructionFactory.createLoad(subject.getType(), subject.getIndex()));
il.append(InstructionFactory.createLoad(complement.getType(), complement.getIndex()));
il.append(
factory.createInvoke(
"org.junit.Assert",
"assertThat",
Type.VOID,
new Type[] { Type.OBJECT, new ObjectType("org.hamcrest.Matcher") },
Constants.INVOKESTATIC));
}
private void visitIsNotStatement(
IsNotStatement isNotStatement,
Map<String, LocalVariableGen> locals,
InstructionList il,
InstructionFactory factory,
ConstantPoolGen cp) {
LocalVariableGen subject = locals.get(isNotStatement.getSubject().getName());
LocalVariableGen complement = locals.get(isNotStatement.getComplement().getName());
il.append(InstructionFactory.createLoad(subject.getType(), subject.getIndex()));
il.append(InstructionFactory.createLoad(complement.getType(), complement.getIndex()));
il.append(
factory.createInvoke(
"org.hamcrest.CoreMatchers",
"not",
new ObjectType("org.hamcrest.Matcher"),
new Type[] { complement.getType() },
Constants.INVOKESTATIC));
il.append(
factory.createInvoke(
"org.junit.Assert",
"assertThat",
Type.VOID,
new Type[] { Type.OBJECT, new ObjectType("org.hamcrest.Matcher") },
Constants.INVOKESTATIC));
}
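    // Source-level sketch of what the two emitters above produce, assuming the subject
    // local holds an Object and the complement local holds an org.hamcrest.Matcher:
    //
    //   org.junit.Assert.assertThat(subject, complement);                                // IsStatement
    //   org.junit.Assert.assertThat(subject, org.hamcrest.CoreMatchers.not(complement)); // IsNotStatement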
private void visitVarInitStatement(
VarInitStatement varInitStatement,
Map<String, LocalVariableGen> locals,
InstructionList il,
InstructionFactory factory,
ConstantPoolGen cp) {
LocalVariableGen var = locals.get(varInitStatement.getName());
Type type = typeOf(varInitStatement.getType());
varInitStatement.getValue().<Void>accept(
varExpr -> {
LocalVariableGen from = locals.get(varExpr.getName());
il.append(InstructionFactory.createLoad(from.getType(), from.getIndex()));
il.append(InstructionFactory.createStore(var.getType(), var.getIndex()));
return null;
},
instanceOfMatcherExpr -> {
il.append(new PUSH(cp, new ObjectType(instanceOfMatcherExpr.getClassName())));
il.append(factory.createInvoke(
"org.hamcrest.CoreMatchers",
"instanceOf",
new ObjectType("org.hamcrest.Matcher"),
new Type[] { new ObjectType("java.lang.Class") },
Constants.INVOKESTATIC));
il.append(InstructionFactory.createStore(var.getType(), var.getIndex()));
return null;
},
nullValueMatcherExpr -> {
il.append(factory.createInvoke(
"org.hamcrest.CoreMatchers",
"nullValue",
new ObjectType("org.hamcrest.Matcher"),
Type.NO_ARGS,
Constants.INVOKESTATIC));
il.append(InstructionFactory.createStore(var.getType(), var.getIndex()));
return null;
},
conjunctionMatcherExpr -> {
List<Var> matchers = conjunctionMatcherExpr.getMatchers();
int numMatchers = matchers.size();
// first create array
il.append(new PUSH(cp, numMatchers));
il.append(factory.createNewArray(new ObjectType("org.hamcrest.Matcher"), (short) 1));
// fill the array with the matchers
for (int i = 0; i < numMatchers; i++) {
String name = matchers.get(i).getName();
LocalVariableGen lv = locals.get(name);
il.append(InstructionConstants.DUP);
il.append(new PUSH(cp, i));
il.append(InstructionFactory.createLoad(lv.getType(), lv.getIndex()));
il.append(InstructionConstants.AASTORE);
}
// then call allOf with the array(=vararg)
il.append(factory.createInvoke(
"org.hamcrest.CoreMatchers",
"allOf",
new ObjectType("org.hamcrest.Matcher"),
new Type[] { new ArrayType(new ObjectType("org.hamcrest.Matcher"), 1) },
Constants.INVOKESTATIC));
il.append(InstructionFactory.createStore(var.getType(), var.getIndex()));
return null;
},
equalToMatcherExpr -> {
Var operand = equalToMatcherExpr.getOperand();
LocalVariableGen lv = locals.get(operand.getName());
il.append(InstructionFactory.createLoad(lv.getType(), lv.getIndex()));
il.append(factory.createInvoke(
"org.hamcrest.CoreMatchers",
"is",
new ObjectType("org.hamcrest.Matcher"),
new Type[] { lv.getType() },
Constants.INVOKESTATIC));
il.append(InstructionFactory.createStore(var.getType(), var.getIndex()));
return null;
},
suchThatMatcherExpr -> { return null; },
newExpr -> {
il.append(factory.createNew(newExpr.getType()));
il.append(InstructionConstants.DUP);
il.append(factory.createInvoke(
newExpr.getType(),
"<init>",
Type.VOID,
Type.NO_ARGS,
Constants.INVOKESPECIAL));
il.append(InstructionFactory.createStore(var.getType(), var.getIndex()));
return null;
},
strLitExpr -> {
il.append(new PUSH(cp, strLitExpr.getText()));
il.append(InstructionFactory.createStore(var.getType(), var.getIndex()));
return null;
},
nullExpr -> {
il.append(InstructionConstants.ACONST_NULL);
il.append(InstructionFactory.createStore(var.getType(), var.getIndex()));
return null;
},
invokeExpr -> {
// first push target object
LocalVariableGen object = locals.get(invokeExpr.getObject().getName());
il.append(InstructionFactory.createLoad(object.getType(), object.getIndex()));
// push arguments
for (Var arg : invokeExpr.getArgs()) {
LocalVariableGen lv = locals.get(arg.getName());
il.append(InstructionFactory.createLoad(lv.getType(), lv.getIndex()));
}
// then call method
il.append(factory.createInvoke(
object.getType().toString(), // TODO: ?
invokeExpr.getMethodName(),
typeOf(invokeExpr.getReturnType()),
invokeExpr.getArgTypes().stream()
.map(BcelJUnitAstCompiler::typeOf)
.collect(Collectors.toList())
.toArray(new Type[]{}),
invokeExpr.getInstruction() == InvokeExpr.Instruction.VIRTUAL
? Constants.INVOKEVIRTUAL
: Constants.INVOKEINTERFACE));
il.append(InstructionFactory.createStore(var.getType(), var.getIndex()));
return null;
},
invokeStaticExpr -> {
for (Var arg : invokeStaticExpr.getArgs()) {
LocalVariableGen lv = locals.get(arg.getName());
il.append(InstructionFactory.createLoad(lv.getType(), lv.getIndex()));
}
il.append(factory.createInvoke(
invokeStaticExpr.getClazz().getText(),
invokeStaticExpr.getMethodName(),
typeOf(invokeStaticExpr.getReturnType()),
invokeStaticExpr.getArgTypes().stream()
.map(BcelJUnitAstCompiler::typeOf)
.collect(Collectors.toList())
.toArray(new Type[]{}),
Constants.INVOKESTATIC));
il.append(InstructionFactory.createStore(var.getType(), var.getIndex()));
return null;
},
intLitExpr -> {
il.append(new PUSH(cp, intLitExpr.getValue()));
il.append(InstructionFactory.createStore(var.getType(), var.getIndex()));
return null;
},
classLitExpr -> {
il.append(new PUSH(cp, new ObjectType(classLitExpr.getType().getText())));
il.append(InstructionFactory.createStore(var.getType(), var.getIndex()));
return null;
},
equalOpExpr -> {
LocalVariableGen lhs = locals.get(equalOpExpr.getLhs().getName());
il.append(InstructionFactory.createLoad(lhs.getType(), lhs.getIndex()));
LocalVariableGen rhs = locals.get(equalOpExpr.getRhs().getName());
il.append(InstructionFactory.createLoad(rhs.getType(), rhs.getIndex()));
// if
BranchInstruction if_acmpne = InstructionFactory.createBranchInstruction(Constants.IF_ACMPNE, null);
il.append(if_acmpne);
// then
il.append(new PUSH(cp, true));
BranchInstruction goto_ = InstructionFactory.createBranchInstruction(Constants.GOTO, null);
il.append(goto_);
// else
InstructionHandle else_ = il.append(new PUSH(cp, false));
InstructionHandle store = il.append(InstructionFactory.createStore(var.getType(), var.getIndex()));
// tie the knot
if_acmpne.setTarget(else_);
goto_.setTarget(store);
return null;
});
}
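    // A few of the initializers emitted above, written as the Java they approximate (sketch):
    //
    //   equalToMatcherExpr:     var = org.hamcrest.CoreMatchers.is(operand);
    //   conjunctionMatcherExpr: var = org.hamcrest.CoreMatchers.allOf(new Matcher[] { m0, ..., mN });
    //   equalOpExpr:            var = (lhs == rhs);   // reference comparison via IF_ACMPNE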
static Type typeOf(yokohama.unit.ast_junit.Type type) {
int dims = type.getDims();
if (dims == 0) {
return type.getNonArrayType().accept(
primitiveType -> {
Kind kind = primitiveType.getKind();
switch (kind) {
case BOOLEAN: return Type.BOOLEAN;
case BYTE: return Type.BYTE;
case SHORT: return Type.SHORT;
case INT: return Type.INT;
case LONG: return Type.LONG;
case CHAR: return Type.CHAR;
case FLOAT: return Type.FLOAT;
case DOUBLE: return Type.DOUBLE;
}
throw new RuntimeException("should not reach here");
},
classType -> new ObjectType(classType.getText()));
} else {
return new ArrayType(
typeOf(new yokohama.unit.ast_junit.Type(type.getNonArrayType() , 0)),
dims);
}
}
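    // Example mapping (illustrative): a yokohama.unit type with non-array kind INT and
    // dims == 2 becomes new ArrayType(Type.INT, 2), i.e. BCEL's representation of int[][].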
}
| src/main/java/yokohama/unit/translator/BcelJUnitAstCompiler.java | package yokohama.unit.translator;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.bcel.Constants;
import org.apache.bcel.generic.AnnotationEntryGen;
import org.apache.bcel.generic.ArrayType;
import org.apache.bcel.generic.BranchInstruction;
import org.apache.bcel.generic.ClassGen;
import org.apache.bcel.generic.ConstantPoolGen;
import org.apache.bcel.generic.InstructionConstants;
import org.apache.bcel.generic.InstructionFactory;
import org.apache.bcel.generic.InstructionHandle;
import org.apache.bcel.generic.InstructionList;
import org.apache.bcel.generic.LocalVariableGen;
import org.apache.bcel.generic.MethodGen;
import org.apache.bcel.generic.ObjectType;
import org.apache.bcel.generic.PUSH;
import org.apache.bcel.generic.Type;
import org.apache.commons.collections4.ListUtils;
import yokohama.unit.ast.Kind;
import yokohama.unit.ast_junit.CatchClause;
import yokohama.unit.ast_junit.CompilationUnit;
import yokohama.unit.ast_junit.IsNotStatement;
import yokohama.unit.ast_junit.IsStatement;
import yokohama.unit.ast_junit.Statement;
import yokohama.unit.ast_junit.TestMethod;
import yokohama.unit.ast_junit.Var;
import yokohama.unit.ast_junit.VarDeclVisitor;
import yokohama.unit.ast_junit.VarInitStatement;
import yokohama.unit.util.Pair;
public class BcelJUnitAstCompiler implements JUnitAstCompiler {
public static class CaughtExceptionVarVisitor {
public static List<Pair<yokohama.unit.ast_junit.Type, String>> sortedSet(Stream<Pair<yokohama.unit.ast_junit.Type, String>> i) {
return i.collect(Collectors.toSet())
.stream()
.sorted((o1, o2) -> o1.getSecond().compareTo(o2.getSecond()))
.collect(Collectors.toList());
}
public Stream<Pair<yokohama.unit.ast_junit.Type, String>> visitTestMethod(TestMethod testMethod) {
return visitStatements(testMethod.getStatements());
}
public Stream<Pair<yokohama.unit.ast_junit.Type, String>> visitStatements(List<Statement> statements) {
return statements.stream().flatMap(this::visitStatement);
}
public Stream<Pair<yokohama.unit.ast_junit.Type, String>> visitStatement(Statement statement) {
return statement.accept(
isStatement -> Stream.<Pair<yokohama.unit.ast_junit.Type, String>>empty(),
isNotStatement -> Stream.<Pair<yokohama.unit.ast_junit.Type, String>>empty(),
varInitStatement -> Stream.<Pair<yokohama.unit.ast_junit.Type, String>>empty(),
returnIsStatement -> Stream.<Pair<yokohama.unit.ast_junit.Type, String>>empty(),
returnIsNotStatement -> Stream.<Pair<yokohama.unit.ast_junit.Type, String>>empty(),
invokeVoidStatement -> Stream.<Pair<yokohama.unit.ast_junit.Type, String>>empty(),
tryStatement ->
Stream.concat(
visitStatements(tryStatement.getTryStatements()),
Stream.concat(
tryStatement.getCatchClauses().stream().flatMap(this::visitCatchClause),
visitStatements(tryStatement.getFinallyStatements()))),
ifStatement ->
Stream.concat(
visitStatements(ifStatement.getThen()),
visitStatements(ifStatement.getOtherwise())));
}
public Stream<Pair<yokohama.unit.ast_junit.Type, String>> visitCatchClause(CatchClause catchClause) {
return Stream.concat(
Stream.of(
new Pair<>(
catchClause.getClassType().toType(),
catchClause.getVar().getName())),
visitStatements(catchClause.getStatements()));
}
}
@Override
public boolean compile(
Path docyPath,
CompilationUnit ast,
String className,
String packageName,
List<String> classPath,
Optional<Path> dest,
List<String> javacArgs) {
ClassGen cg = new ClassGen(
packageName.equals("") ? className : packageName + "." + className,
"java.lang.Object", // super class
docyPath.getFileName().toString(), // source file name
Constants.ACC_PUBLIC | Constants.ACC_SUPER,
null // implemented interfaces
);
        // set class file version to major 49 (the Java 1.5 class file format)
cg.setMajor(49);
cg.setMinor(0);
ConstantPoolGen cp = cg.getConstantPool(); // cg creates constant pool
cg.addEmptyConstructor(Constants.ACC_PUBLIC);
for (TestMethod testMethod : ast.getClassDecl().getTestMethods()) {
visitTestMethod(testMethod, cg, cp);
}
try {
Path classFilePath = makeClassFilePath(dest, packageName, className);
cg.getJavaClass().dump(classFilePath.toFile());
} catch(java.io.IOException e) {
System.err.println(e);
}
return true;
}
public Path makeClassFilePath(Optional<Path> dest, String packageName, String className) {
Path classFile = (dest.isPresent() ? dest.get(): Paths.get("."))
.resolve(Paths.get(packageName.replace('.', '/')))
.resolve(className + ".class");
return classFile;
}
private void visitTestMethod(TestMethod testMethod, ClassGen cg, ConstantPoolGen cp) {
InstructionList il = new InstructionList();
MethodGen mg = new MethodGen(Constants.ACC_PUBLIC, // access flags
Type.VOID, // return type of a test method is always void
Type.NO_ARGS, new String[]{}, // test methods have no arguments
testMethod.getName(),
cg.getClassName(),
il, cp);
AnnotationEntryGen ag = new AnnotationEntryGen(
new ObjectType("org.junit.Test"),
Arrays.asList(),
true,
cp);
mg.addAnnotationEntry(ag);
InstructionFactory factory = new InstructionFactory(cg);
Map<String, LocalVariableGen> locals = new HashMap<>();
List<Pair<yokohama.unit.ast_junit.Type, String>> varDecls =
VarDeclVisitor.sortedSet(new VarDeclVisitor().visitTestMethod(testMethod));
List<Pair<yokohama.unit.ast_junit.Type, String>> caughtExVars =
CaughtExceptionVarVisitor.sortedSet(new CaughtExceptionVarVisitor().visitTestMethod(testMethod));
for (Pair<yokohama.unit.ast_junit.Type,String> pair :
ListUtils.union(varDecls, caughtExVars)) {
yokohama.unit.ast_junit.Type type = pair.getFirst();
String name = pair.getSecond();
if (locals.containsKey(name))
throw new RuntimeException("duplicate local variable: " + name);
LocalVariableGen lv = mg.addLocalVariable(name, typeOf(type), null, null);
locals.put(name, lv);
}
for (Statement statement : testMethod.getStatements()) {
visitStatement(statement, locals, mg, il, factory, cp);
}
il.append(InstructionConstants.RETURN);
mg.setMaxStack();
cg.addMethod(mg.getMethod());
il.dispose();
}
private void visitStatement(
Statement statement,
Map<String, LocalVariableGen> locals,
MethodGen mg,
InstructionList il,
InstructionFactory factory,
ConstantPoolGen cp) {
statement.<Void>accept(
isStatement -> {
visitIsStatement(isStatement, locals, il, factory, cp);
return null;
},
isNotStatement -> {
visitIsNotStatement(isNotStatement, locals, il, factory, cp);
return null;
},
varInitStatement -> {
visitVarInitStatement(varInitStatement, locals, il, factory, cp);
return null;
},
returnIsStatement -> { return null; },
returnIsNotStatement -> { return null; },
invokeVoidStatement -> { return null; },
tryStatement -> {
InstructionHandle startTry = il.append(InstructionFactory.NOP);
for (Statement s : tryStatement.getTryStatements()) {
visitStatement(s, locals, mg, il, factory, cp);
}
InstructionHandle endTry = il.append(InstructionFactory.NOP);
BranchInstruction goto_ = InstructionFactory.createBranchInstruction(Constants.GOTO, null);
il.append(goto_);
List<BranchInstruction> catchExits = new ArrayList<>();
for (CatchClause catchClause : tryStatement.getCatchClauses()) {
LocalVariableGen ex = locals.get(catchClause.getVar().getName());
InstructionHandle startCatch = il.append(
InstructionFactory.createStore(ex.getType(), ex.getIndex()));
mg.addExceptionHandler(
startTry,
endTry,
startCatch,
(ObjectType)typeOf(catchClause.getClassType().toType()));
for (Statement s : catchClause.getStatements()) {
this.visitStatement(s, locals, mg, il, factory, cp);
}
BranchInstruction exitCatch = InstructionFactory.createBranchInstruction(Constants.GOTO, null);
il.append(exitCatch);
catchExits.add(exitCatch);
}
InstructionHandle startFinally = il.append(InstructionFactory.NOP);
for (Statement s : tryStatement.getFinallyStatements()) {
visitStatement(s, locals, mg, il, factory, cp);
}
goto_.setTarget(startFinally);
for (BranchInstruction bi : catchExits) {
bi.setTarget(startFinally);
}
return null;
},
ifStatement -> {
LocalVariableGen lv = locals.get(ifStatement.getCond().getName());
il.append(InstructionFactory.createLoad(lv.getType(), lv.getIndex()));
// if
BranchInstruction ifeq = InstructionFactory.createBranchInstruction(Constants.IFEQ, null);
il.append(ifeq);
// then
for (Statement thenStatement : ifStatement.getThen()) {
visitStatement(thenStatement, locals, mg, il, factory, cp);
}
BranchInstruction goto_ = InstructionFactory.createBranchInstruction(Constants.GOTO, null);
il.append(goto_);
// else
InstructionHandle else_ = il.append(InstructionFactory.NOP);
for (Statement elseStatement : ifStatement.getOtherwise()) {
visitStatement(elseStatement, locals, mg, il, factory, cp);
}
InstructionHandle fi = il.append(InstructionFactory.NOP);
// tie the knot
ifeq.setTarget(else_);
goto_.setTarget(fi);
return null;
}
);
}
private void visitIsStatement(
IsStatement isStatement,
Map<String, LocalVariableGen> locals,
InstructionList il,
InstructionFactory factory,
ConstantPoolGen cp) {
LocalVariableGen subject = locals.get(isStatement.getSubject().getName());
LocalVariableGen complement = locals.get(isStatement.getComplement().getName());
il.append(InstructionFactory.createLoad(subject.getType(), subject.getIndex()));
il.append(InstructionFactory.createLoad(complement.getType(), complement.getIndex()));
il.append(
factory.createInvoke(
"org.junit.Assert",
"assertThat",
Type.VOID,
new Type[] { Type.OBJECT, new ObjectType("org.hamcrest.Matcher") },
Constants.INVOKESTATIC));
}
private void visitIsNotStatement(
IsNotStatement isNotStatement,
Map<String, LocalVariableGen> locals,
InstructionList il,
InstructionFactory factory,
ConstantPoolGen cp) {
LocalVariableGen subject = locals.get(isNotStatement.getSubject().getName());
LocalVariableGen complement = locals.get(isNotStatement.getComplement().getName());
il.append(InstructionFactory.createLoad(subject.getType(), subject.getIndex()));
il.append(InstructionFactory.createLoad(complement.getType(), complement.getIndex()));
il.append(
factory.createInvoke(
"org.hamcrest.CoreMatchers",
"not",
new ObjectType("org.hamcrest.Matcher"),
new Type[] { complement.getType() },
Constants.INVOKESTATIC));
il.append(
factory.createInvoke(
"org.junit.Assert",
"assertThat",
Type.VOID,
new Type[] { Type.OBJECT, new ObjectType("org.hamcrest.Matcher") },
Constants.INVOKESTATIC));
}
private void visitVarInitStatement(
VarInitStatement varInitStatement,
Map<String, LocalVariableGen> locals,
InstructionList il,
InstructionFactory factory,
ConstantPoolGen cp) {
LocalVariableGen var = locals.get(varInitStatement.getName());
Type type = typeOf(varInitStatement.getType());
varInitStatement.getValue().<Void>accept(
varExpr -> {
LocalVariableGen from = locals.get(varExpr.getName());
il.append(InstructionFactory.createLoad(from.getType(), from.getIndex()));
il.append(InstructionFactory.createStore(var.getType(), var.getIndex()));
return null;
},
instanceOfMatcherExpr -> {
il.append(new PUSH(cp, new ObjectType(instanceOfMatcherExpr.getClassName())));
il.append(factory.createInvoke(
"org.hamcrest.CoreMatchers",
"instanceOf",
new ObjectType("org.hamcrest.Matcher"),
new Type[] { new ObjectType("java.lang.Class") },
Constants.INVOKESTATIC));
il.append(InstructionFactory.createStore(var.getType(), var.getIndex()));
return null;
},
nullValueMatcherExpr -> {
il.append(factory.createInvoke(
"org.hamcrest.CoreMatchers",
"nullValue",
new ObjectType("org.hamcrest.Matcher"),
Type.NO_ARGS,
Constants.INVOKESTATIC));
il.append(InstructionFactory.createStore(var.getType(), var.getIndex()));
return null;
},
conjunctionMatcherExpr -> {
List<Var> matchers = conjunctionMatcherExpr.getMatchers();
int numMatchers = matchers.size();
// first create array
il.append(new PUSH(cp, numMatchers));
il.append(factory.createNewArray(new ObjectType("org.hamcrest.Matcher"), (short) 1));
// fill the array with the matchers
for (int i = 0; i < numMatchers; i++) {
String name = matchers.get(i).getName();
LocalVariableGen lv = locals.get(name);
il.append(InstructionConstants.DUP);
il.append(new PUSH(cp, i));
il.append(InstructionFactory.createLoad(lv.getType(), lv.getIndex()));
il.append(InstructionConstants.AASTORE);
}
// then call allOf with the array(=vararg)
il.append(factory.createInvoke(
"org.hamcrest.CoreMatchers",
"allOf",
new ObjectType("org.hamcrest.Matcher"),
new Type[] { new ArrayType(new ObjectType("org.hamcrest.Matcher"), 1) },
Constants.INVOKESTATIC));
il.append(InstructionFactory.createStore(var.getType(), var.getIndex()));
return null;
},
equalToMatcherExpr -> {
Var operand = equalToMatcherExpr.getOperand();
LocalVariableGen lv = locals.get(operand.getName());
il.append(InstructionFactory.createLoad(lv.getType(), lv.getIndex()));
il.append(factory.createInvoke(
"org.hamcrest.CoreMatchers",
"is",
new ObjectType("org.hamcrest.Matcher"),
new Type[] { lv.getType() },
Constants.INVOKESTATIC));
il.append(InstructionFactory.createStore(var.getType(), var.getIndex()));
return null;
},
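                // suchThat matchers are not lowered to bytecode here; this case is a no-op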
suchThatMatcherExpr -> { return null; },
newExpr -> {
il.append(factory.createNew(newExpr.getType()));
il.append(InstructionConstants.DUP);
il.append(factory.createInvoke(
newExpr.getType(),
"<init>",
Type.VOID,
Type.NO_ARGS,
Constants.INVOKESPECIAL));
il.append(InstructionFactory.createStore(var.getType(), var.getIndex()));
return null;
},
strLitExpr -> {
il.append(new PUSH(cp, strLitExpr.getText()));
il.append(InstructionFactory.createStore(var.getType(), var.getIndex()));
return null;
},
nullExpr -> {
il.append(InstructionConstants.ACONST_NULL);
il.append(InstructionFactory.createStore(var.getType(), var.getIndex()));
return null;
},
invokeExpr -> {
// first push target object
LocalVariableGen object = locals.get(invokeExpr.getObject().getName());
il.append(InstructionFactory.createLoad(object.getType(), object.getIndex()));
// push arguments
for (Var arg : invokeExpr.getArgs()) {
LocalVariableGen lv = locals.get(arg.getName());
il.append(InstructionFactory.createLoad(lv.getType(), lv.getIndex()));
}
// then call method
il.append(factory.createInvoke(
object.getType().toString(), // TODO: ?
invokeExpr.getMethodName(),
typeOf(invokeExpr.getReturnType()),
invokeExpr.getArgTypes().stream()
.map(BcelJUnitAstCompiler::typeOf)
.collect(Collectors.toList())
.toArray(new Type[]{}),
Constants.INVOKEVIRTUAL));
il.append(InstructionFactory.createStore(var.getType(), var.getIndex()));
return null;
},
invokeStaticExpr -> {
for (Var arg : invokeStaticExpr.getArgs()) {
LocalVariableGen lv = locals.get(arg.getName());
il.append(InstructionFactory.createLoad(lv.getType(), lv.getIndex()));
}
il.append(factory.createInvoke(
invokeStaticExpr.getClazz().getText(),
invokeStaticExpr.getMethodName(),
typeOf(invokeStaticExpr.getReturnType()),
invokeStaticExpr.getArgTypes().stream()
.map(BcelJUnitAstCompiler::typeOf)
.collect(Collectors.toList())
.toArray(new Type[]{}),
Constants.INVOKESTATIC));
il.append(InstructionFactory.createStore(var.getType(), var.getIndex()));
return null;
},
intLitExpr -> {
il.append(new PUSH(cp, intLitExpr.getValue()));
il.append(InstructionFactory.createStore(var.getType(), var.getIndex()));
return null;
},
classLitExpr -> {
il.append(new PUSH(cp, new ObjectType(classLitExpr.getType().getText())));
il.append(InstructionFactory.createStore(var.getType(), var.getIndex()));
return null;
},
equalOpExpr -> {
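                    // IF_ACMPNE compares object references, so this lowers == on references rather than equals()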
LocalVariableGen lhs = locals.get(equalOpExpr.getLhs().getName());
il.append(InstructionFactory.createLoad(lhs.getType(), lhs.getIndex()));
LocalVariableGen rhs = locals.get(equalOpExpr.getRhs().getName());
il.append(InstructionFactory.createLoad(rhs.getType(), rhs.getIndex()));
// if
BranchInstruction if_acmpne = InstructionFactory.createBranchInstruction(Constants.IF_ACMPNE, null);
il.append(if_acmpne);
// then
il.append(new PUSH(cp, true));
BranchInstruction goto_ = InstructionFactory.createBranchInstruction(Constants.GOTO, null);
il.append(goto_);
// else
InstructionHandle else_ = il.append(new PUSH(cp, false));
InstructionHandle store = il.append(InstructionFactory.createStore(var.getType(), var.getIndex()));
// tie the knot
if_acmpne.setTarget(else_);
goto_.setTarget(store);
return null;
});
}
static Type typeOf(yokohama.unit.ast_junit.Type type) {
int dims = type.getDims();
if (dims == 0) {
return type.getNonArrayType().accept(
primitiveType -> {
Kind kind = primitiveType.getKind();
switch (kind) {
case BOOLEAN: return Type.BOOLEAN;
case BYTE: return Type.BYTE;
case SHORT: return Type.SHORT;
case INT: return Type.INT;
case LONG: return Type.LONG;
case CHAR: return Type.CHAR;
case FLOAT: return Type.FLOAT;
case DOUBLE: return Type.DOUBLE;
}
throw new RuntimeException("should not reach here");
},
classType -> new ObjectType(classType.getText()));
} else {
return new ArrayType(
typeOf(new yokohama.unit.ast_junit.Type(type.getNonArrayType() , 0)),
dims);
}
}
}
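The commit recorded below changes the invokeExpr case above so that the generated call opcode depends on the callee kind: the JVM requires INVOKEINTERFACE rather than INVOKEVIRTUAL when the target method is declared on an interface. A minimal BCEL sketch of that selection, assuming a hypothetical isInterfaceCall flag derived from the resolved target method (the flag and the java.util.List example are illustrative, not part of the class above):

    short kind = isInterfaceCall ? Constants.INVOKEINTERFACE : Constants.INVOKEVIRTUAL;
    il.append(factory.createInvoke(
            "java.util.List",   // receiver's declared type (an interface in this example)
            "size",             // method name
            Type.INT,           // return type
            Type.NO_ARGS,       // argument types
            kind));             // InstructionFactory emits the matching call instruction for the kind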
| Generate INVOKEINTERFACE for interface method invocation | src/main/java/yokohama/unit/translator/BcelJUnitAstCompiler.java | Generate INVOKEINTERFACE for interface method invocation | <ide><path>rc/main/java/yokohama/unit/translator/BcelJUnitAstCompiler.java
<ide> import yokohama.unit.ast.Kind;
<ide> import yokohama.unit.ast_junit.CatchClause;
<ide> import yokohama.unit.ast_junit.CompilationUnit;
<add>import yokohama.unit.ast_junit.InvokeExpr;
<ide> import yokohama.unit.ast_junit.IsNotStatement;
<ide> import yokohama.unit.ast_junit.IsStatement;
<ide> import yokohama.unit.ast_junit.Statement;
<ide> .map(BcelJUnitAstCompiler::typeOf)
<ide> .collect(Collectors.toList())
<ide> .toArray(new Type[]{}),
<del> Constants.INVOKEVIRTUAL));
<add> invokeExpr.getInstruction() == InvokeExpr.Instruction.VIRTUAL
<add> ? Constants.INVOKEVIRTUAL
<add> : Constants.INVOKEINTERFACE));
<ide>
<ide> il.append(InstructionFactory.createStore(var.getType(), var.getIndex()));
<ide> return null; |
|
Java | apache-2.0 | 799a65c2acb6d89ca4b560b98c1565c4de58b1a0 | 0 | marques-work/gocd,ketan/gocd,ketan/gocd,gocd/gocd,gocd/gocd,Skarlso/gocd,GaneshSPatil/gocd,Skarlso/gocd,ketan/gocd,marques-work/gocd,GaneshSPatil/gocd,Skarlso/gocd,gocd/gocd,gocd/gocd,ketan/gocd,marques-work/gocd,GaneshSPatil/gocd,marques-work/gocd,Skarlso/gocd,ketan/gocd,gocd/gocd,ketan/gocd,GaneshSPatil/gocd,gocd/gocd,GaneshSPatil/gocd,Skarlso/gocd,GaneshSPatil/gocd,marques-work/gocd,Skarlso/gocd,marques-work/gocd | /*
* Copyright 2020 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.database.h2;
import com.thoughtworks.go.server.database.DbProperties;
import org.apache.commons.dbcp2.BasicDataSource;
import org.apache.commons.lang3.StringUtils;
import java.io.File;
public class DefaultH2DataSource {
public static BasicDataSource defaultH2DataSource(BasicDataSource basicDataSource, DbProperties properties) {
File defaultDbDir = new File("db/h2db");
if (!defaultDbDir.exists()) {
defaultDbDir.mkdirs();
}
        String defaultCacheSizeInMB = String.valueOf(128 * 1024); // 128 MB (H2's CACHE_SIZE option is specified in KB)
String defaultH2Url = "jdbc:h2:./db/h2db/cruise" +
";DB_CLOSE_DELAY=-1" +
";DB_CLOSE_ON_EXIT=FALSE" + // do not close the DB on JVM exit
";CACHE_SIZE=" + defaultCacheSizeInMB +
";TRACE_MAX_FILE_SIZE=16" + // http://www.h2database.com/html/features.html#trace_options
";TRACE_LEVEL_FILE=1" // http://www.h2database.com/html/features.html#trace_options
;
basicDataSource.setUrl(defaultH2Url);
basicDataSource.setUsername(StringUtils.defaultIfBlank(properties.user(), "sa"));
basicDataSource.setPassword(StringUtils.stripToEmpty(properties.password()));
basicDataSource.setMaxIdle(properties.maxIdle());
basicDataSource.setMaxTotal(properties.maxTotal());
return basicDataSource;
}
}
| db-support/db-support-base/src/main/java/com/thoughtworks/go/server/database/h2/DefaultH2DataSource.java | /*
* Copyright 2020 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.database.h2;
import com.thoughtworks.go.server.database.DbProperties;
import org.apache.commons.dbcp2.BasicDataSource;
import org.apache.commons.lang3.StringUtils;
import java.io.File;
public class DefaultH2DataSource {
public static BasicDataSource defaultH2DataSource(BasicDataSource basicDataSource, DbProperties properties) {
File defaultDbDir = new File("db/h2db");
if (!defaultDbDir.exists()) {
defaultDbDir.mkdirs();
}
        String defaultCacheSizeInMB = String.valueOf(128 * 1024); // 128 MB (H2's CACHE_SIZE option is specified in KB)
String defaultH2Url = "jdbc:h2:./db/h2db/cruise" +
";DB_CLOSE_DELAY=-1" +
";DB_CLOSE_ON_EXIT=FALSE" + // do not close the DB on JVM exit
// ";MVCC=TRUE" +
";CACHE_SIZE=" + defaultCacheSizeInMB +
";TRACE_MAX_FILE_SIZE=16" + // http://www.h2database.com/html/features.html#trace_options
";TRACE_LEVEL_FILE=1" // http://www.h2database.com/html/features.html#trace_options
;
basicDataSource.setUrl(defaultH2Url);
basicDataSource.setUsername(StringUtils.defaultIfBlank(properties.user(), "sa"));
basicDataSource.setPassword(StringUtils.stripToEmpty(properties.password()));
return basicDataSource;
}
}
| Set default connection pool settings for h2 datasource #000
* DefaultH2DataSource maxIdle and maxTotal defaults to 32 connections.
  This is the same as the defaults we had prior to adding support for
multiple databases.
* H2 db url no longer supports MVCC flag, hence removing it.
Check H2 1.4.200 changelog - https://github.com/h2database/h2database/releases/tag/version-1.4.200
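The pool settings described above map directly onto commons-dbcp2's BasicDataSource. A minimal sketch of the resulting configuration, with literal placeholder values standing in for DbProperties (the 32-connection figures mirror the defaults described above; everything else is illustrative):

    BasicDataSource ds = new BasicDataSource();
    ds.setUrl("jdbc:h2:./db/h2db/cruise;DB_CLOSE_DELAY=-1;DB_CLOSE_ON_EXIT=FALSE");
    ds.setUsername("sa");
    ds.setPassword("");
    ds.setMaxIdle(32);   // most idle connections kept open in the pool
    ds.setMaxTotal(32);  // most connections the pool will hand out at once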
| db-support/db-support-base/src/main/java/com/thoughtworks/go/server/database/h2/DefaultH2DataSource.java | Set default connection pool settings for h2 datasource #000 | <ide><path>b-support/db-support-base/src/main/java/com/thoughtworks/go/server/database/h2/DefaultH2DataSource.java
<ide> String defaultH2Url = "jdbc:h2:./db/h2db/cruise" +
<ide> ";DB_CLOSE_DELAY=-1" +
<ide> ";DB_CLOSE_ON_EXIT=FALSE" + // do not close the DB on JVM exit
<del>// ";MVCC=TRUE" +
<ide> ";CACHE_SIZE=" + defaultCacheSizeInMB +
<ide> ";TRACE_MAX_FILE_SIZE=16" + // http://www.h2database.com/html/features.html#trace_options
<ide> ";TRACE_LEVEL_FILE=1" // http://www.h2database.com/html/features.html#trace_options
<ide> basicDataSource.setUrl(defaultH2Url);
<ide> basicDataSource.setUsername(StringUtils.defaultIfBlank(properties.user(), "sa"));
<ide> basicDataSource.setPassword(StringUtils.stripToEmpty(properties.password()));
<add> basicDataSource.setMaxIdle(properties.maxIdle());
<add> basicDataSource.setMaxTotal(properties.maxTotal());
<ide> return basicDataSource;
<ide> }
<ide> } |
|
Java | apache-2.0 | df74971d64943f589559e6cdac2d4f606c5d76ae | 0 | Jonathan727/javarosa,Jonathan727/javarosa,Jonathan727/javarosa | package org.javarosa.core.model.instance;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Enumeration;
import java.util.Vector;
import org.javarosa.core.model.Constants;
import org.javarosa.core.model.FormElementStateListener;
import org.javarosa.core.model.condition.Constraint;
import org.javarosa.core.model.data.IAnswerData;
import org.javarosa.core.model.instance.utils.ITreeVisitor;
import org.javarosa.core.util.externalizable.DeserializationException;
import org.javarosa.core.util.externalizable.ExtUtil;
import org.javarosa.core.util.externalizable.ExtWrapList;
import org.javarosa.core.util.externalizable.ExtWrapNullable;
import org.javarosa.core.util.externalizable.ExtWrapTagged;
import org.javarosa.core.util.externalizable.Externalizable;
import org.javarosa.core.util.externalizable.PrototypeFactory;
/**
* An element of a DataModelTree.
*
* @author Clayton Sims
*
*/
public class TreeElement implements Externalizable {
protected String name; //can be null only for hidden root node
protected int multiplicity;
protected Vector attributes;
public boolean repeatable;
// public boolean isAttribute; for when we support xml attributes as data nodes
private IAnswerData value;
private Vector children;
/* model properties */
public int dataType = Constants.DATATYPE_NULL;
public boolean relevant = true; //do not read this directly unless you know what you're doing!! use isRelevant() if not
public boolean required = false;
public boolean enabled = true; //do not read this directly unless you know what you're doing!! use isEnabled() if not
public Constraint constraint = null;
public String preloadHandler = null;
public String preloadParams = null;
public boolean relevantInherited = true;
public boolean enabledInherited = true;
Vector observers;
public TreeElement () {
this(null, 0);
}
public TreeElement (String name) {
this(name, 0);
}
public TreeElement (String name, int multiplicity) {
this.name = name;
this.multiplicity = multiplicity;
attributes = null;
repeatable = false;
value = null;
children = null;
}
public boolean isLeaf () {
return (children == null);
}
public boolean isChildable () {
return (value == null);
}
public String getName () {
return name;
}
public void setName (String name) {
this.name = name;
}
public int getMult() {
return multiplicity;
}
public void setMult (int multiplicity) {
this.multiplicity = multiplicity;
}
public IAnswerData getValue() {
return value;
}
public void setValue(IAnswerData value) {
if (isLeaf()) {
this.value = value;
} else {
throw new RuntimeException("Can't set data value for node that has children!");
}
}
//may return null! this vector should not be manipulated outside of this class! (namely, don't delete stuff)
public Vector getChildren () {
return children;
}
public int getNumChildren () {
return (children == null ? 0 : children.size());
}
public TreeElement getChild (String name, int multiplicity) {
if (children == null) {
return null;
} else {
for (int i = 0; i < children.size(); i++) {
TreeElement child = (TreeElement)children.elementAt(i);
if (name.equals(child.getName()) && child.getMult() == multiplicity)
return child;
}
return null;
}
}
public Vector getChild (String name) {
return getChild(name, false);
}
public Vector getChild (String name, boolean includeTemplate) {
Vector v = new Vector();
if (children != null) {
for (int i = 0; i < children.size(); i++) {
TreeElement child = (TreeElement)children.elementAt(i);
if (child.getName().equals(name) && (includeTemplate || child.multiplicity != TreeReference.INDEX_TEMPLATE))
v.addElement(child);
}
}
return v;
}
public void addChild (TreeElement child) {
addChild(child, false);
}
public void addChild(TreeElement child, boolean checkDuplicate) {
if (children == null) {
if (isChildable()) {
children = new Vector();
} else {
throw new RuntimeException("Can't add children to node that has data value!");
}
}
if (child.multiplicity == TreeReference.INDEX_UNBOUND) {
throw new RuntimeException("Cannot add child with an unbound index!");
}
if (checkDuplicate) {
TreeElement existingChild = getChild(child.name, child.multiplicity);
if (existingChild != null) {
throw new RuntimeException("Attempted to add duplicate child!");
}
}
//try to keep things in order
int i = children.size();
if (child.getMult() == TreeReference.INDEX_TEMPLATE) {
TreeElement anchor = getChild(child.getName(), 0);
if (anchor != null)
i = children.indexOf(anchor);
} else {
TreeElement anchor = getChild(child.getName(), (child.getMult() == 0 ? TreeReference.INDEX_TEMPLATE : child.getMult() - 1));
if (anchor != null)
i = children.indexOf(anchor) + 1;
}
children.insertElementAt(child, i);
child.setRelevant(isRelevant(), true);
child.setEnabled(isEnabled(), true);
}
public void removeChild(TreeElement child) {
children.removeElement(child);
nullChildren();
}
public void removeChild (String name, int multiplicity) {
TreeElement child = getChild(name, multiplicity);
if (child != null) {
removeChild(child);
}
}
public void removeChildren (String name) {
removeChildren(name, false);
}
public void removeChildren (String name, boolean includeTemplate) {
Vector v = getChild(name, includeTemplate);
for (int i = 0; i < v.size(); i++) {
removeChild((TreeElement)v.elementAt(i));
}
}
public void removeChildAt (int i) {
children.removeElementAt(i);
nullChildren();
}
private void nullChildren () {
if (children.size() == 0)
children = null;
}
public int getChildMultiplicity (String name) {
return getChild(name, false).size();
}
public TreeElement shallowCopy () {
TreeElement newNode = new TreeElement(name, multiplicity);
newNode.repeatable = repeatable;
newNode.dataType = dataType;
newNode.relevant = relevant;
newNode.required = required;
newNode.enabled = enabled;
newNode.constraint = constraint;
newNode.preloadHandler = preloadHandler;
newNode.preloadParams = preloadParams;
newNode.setAttributesFromSingleStringVector(getSingleStringAttributeVector());
if(value != null) {
newNode.value = value.clone();
}
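		// note: the children vector is shared (aliased), not copied; callers such as deepCopy()
		// must replace it before adding children, or they would mutate this node's own child list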
newNode.children = children;
return newNode;
}
public TreeElement deepCopy (boolean includeTemplates) {
TreeElement newNode = shallowCopy();
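		// shallowCopy() left newNode.children aliased to this node's vector; reset it so the loop
		// below builds a fresh child list instead of appending to the very vector being iterated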
newNode.children = null;
for (int i = 0; i < getNumChildren(); i++) {
TreeElement child = (TreeElement)children.elementAt(i);
if (includeTemplates || child.getMult() != TreeReference.INDEX_TEMPLATE) {
newNode.addChild(child.deepCopy(includeTemplates));
}
}
return newNode;
}
/* ==== MODEL PROPERTIES ==== */
//factoring inheritance rules
public boolean isRelevant () {
return relevantInherited && relevant;
}
//factoring in inheritance rules
public boolean isEnabled () {
return enabledInherited && enabled;
}
/* ==== SPECIAL SETTERS (SETTERS WITH SIDE-EFFECTS) ==== */
public boolean setAnswer (IAnswerData answer) {
if (value != null || answer != null) {
setValue(answer);
alertStateObservers(FormElementStateListener.CHANGE_DATA);
return true;
} else {
return false;
}
}
public void setRequired (boolean required) {
if (this.required != required) {
this.required = required;
alertStateObservers(FormElementStateListener.CHANGE_REQUIRED);
}
}
public void setRelevant (boolean relevant) {
setRelevant(relevant, false);
}
public void setRelevant (boolean relevant, boolean inherited) {
boolean oldRelevancy = isRelevant();
if (inherited) {
this.relevantInherited = relevant;
} else {
this.relevant = relevant;
}
if (isRelevant() != oldRelevancy) {
for (int i = 0; i < getNumChildren(); i++) {
((TreeElement)children.elementAt(i)).setRelevant(isRelevant(), true);
}
alertStateObservers(FormElementStateListener.CHANGE_RELEVANT);
}
}
public void setEnabled (boolean enabled) {
setEnabled(enabled, false);
}
public void setEnabled (boolean enabled, boolean inherited) {
boolean oldEnabled = isEnabled();
if (inherited) {
this.enabledInherited = enabled;
} else {
this.enabled = enabled;
}
if (isEnabled() != oldEnabled) {
for (int i = 0; i < getNumChildren(); i++) {
((TreeElement)children.elementAt(i)).setEnabled(isEnabled(), true);
}
alertStateObservers(FormElementStateListener.CHANGE_ENABLED);
}
}
/* ==== OBSERVER PATTERN ==== */
public void registerStateObserver (FormElementStateListener qsl) {
if (observers == null)
observers = new Vector();
if (!observers.contains(qsl)) {
observers.addElement(qsl);
}
}
public void unregisterStateObserver (FormElementStateListener qsl) {
if (observers != null) {
observers.removeElement(qsl);
if (observers.isEmpty())
observers = null;
}
}
public void unregisterAll () {
observers = null;
}
public void alertStateObservers (int changeFlags) {
if (observers != null) {
for (Enumeration e = observers.elements(); e.hasMoreElements(); )
((FormElementStateListener)e.nextElement()).formElementStateChanged(this, changeFlags);
}
}
/* ==== VISITOR PATTERN ==== */
/**
* Visitor pattern acceptance method.
*
* @param visitor
* The visitor traveling this tree
*/
public void accept(ITreeVisitor visitor) {
visitor.visit(this);
if (children != null) {
Enumeration en = children.elements();
while (en.hasMoreElements()) {
((TreeElement) en.nextElement()).accept(visitor);
}
}
}
/*
* ==== HARD-CODED ATTRIBUTES (delete once we support writable attributes)
* ====
*/
/**
* Returns the number of attributes of this element.
*/
public int getAttributeCount() {
return attributes == null ? 0 : attributes.size ();
}
/**
* get namespace of attribute at 'index' in the vector
* @param index
* @return String
*/
public String getAttributeNamespace (int index) {
return ((String []) attributes.elementAt (index)) [0];
}
/**
* get name of attribute at 'index' in the vector
* @param index
* @return String
*/
public String getAttributeName (int index) {
return ((String []) attributes.elementAt (index)) [1];
}
/**
* get value of attribute at 'index' in the vector
* @param index
* @return String
*/
public String getAttributeValue (int index) {
return ((String []) attributes.elementAt (index)) [2];
}
/**
	 * get the value of the attribute with the given namespace and name
	 * @param namespace
	 * @param name
* @return String
*/
public String getAttributeValue (String namespace, String name) {
for (int i = 0; i < getAttributeCount (); i++) {
if (name.equals (getAttributeName (i))
&& (namespace == null || namespace.equals (getAttributeNamespace(i)))) {
return getAttributeValue (i);
}
}
return null;
}
/**
* Sets the given attribute; a value of null removes the attribute
*
*
* */
public void setAttribute (String namespace, String name, String value) {
if (attributes == null)
attributes = new Vector ();
if (namespace == null)
namespace = "";
for (int i = attributes.size()-1; i >=0; i--){
String[] attribut = (String[]) attributes.elementAt(i);
if (attribut[0].equals(namespace) &&
attribut[1].equals(name)){
if (value == null) {
attributes.removeElementAt(i);
}
else {
attribut[2] = value;
}
return;
}
}
attributes.addElement
(new String [] {namespace, name, value});
}
/**
* A method for producing a vector of single strings - from the current
* attribute vector of string [] arrays.
* @return
*/
public Vector getSingleStringAttributeVector(){
Vector strings = new Vector();
if (attributes == null)
return null;
else{
for(int i =0; i<this.attributes.size();i++){
String [] array = (String [])attributes.elementAt(i);
if (array[0]==null || array[0]=="")
strings.addElement(new String(array[1]+"="+array[2]));
else
strings.addElement(new String(array[0]+":"+array[1]+"="+array[2]));
}
return strings;
}
}
/**
* Method to repopulate the attribute vector from a vector of singleStrings
* @param attStrings
*/
public void setAttributesFromSingleStringVector(Vector attStrings){
Vector stringArrays = new Vector();
if (attStrings == null)
attributes = null;
else{
this.attributes = new Vector();
for(int i =0; i<attStrings.size();i++){
String att = (String)attStrings.elementAt(i);
String [] array = new String [3];
int start = 0;
// get namespace
int pos = att.indexOf(":");
if (pos == -1){
array[0]=null;
start = 0;
}
else{
array[0]=att.substring(start, pos);
start = ++pos;
}
// get attribute name
pos = att.indexOf("=");
array[1]=att.substring(start,pos);
start = ++pos;
array[2]= att.substring(start);
this.setAttribute(array[0], array[1], array[2]);
}
}
}
/* ==== SERIALIZATION ==== */
/* TODO:
*
* this new serialization scheme is kind of lame. ideally, we shouldn't have to sub-class TreeElement at all; we
* should have an API that can seamlessly represent complex data model objects (like weight history or immunizations) as
	 * if they were explicit XML subtrees underneath the parent TreeElement
*
* failing that, we should wrap this scheme in an ExternalizableWrapper
*/
/*
* (non-Javadoc)
*
* @see org.javarosa.core.services.storage.utilities.Externalizable#readExternal(java.io.DataInputStream)
*/
public void readExternal(DataInputStream in, PrototypeFactory pf) throws IOException, DeserializationException {
name = ExtUtil.nullIfEmpty(ExtUtil.readString(in));
multiplicity = ExtUtil.readInt(in);
repeatable = ExtUtil.readBool(in);
value = (IAnswerData)ExtUtil.read(in, new ExtWrapNullable(new ExtWrapTagged()), pf);
//children = ExtUtil.nullIfEmpty((Vector)ExtUtil.read(in, new ExtWrapList(TreeElement.class), pf));
//Jan 22, 2009 - [email protected]
//old line: children = ExtUtil.nullIfEmpty((Vector)ExtUtil.read(in, new ExtWrapList(TreeElement.class), pf));
//New Child deserialization
//1. read null status as boolean
//2. read number of children
//3. for i < number of children
//3.1 if read boolean true , then create TreeElement and deserialize directly.
//3.2 if read boolean false then create tagged element and deserialize child
if(!ExtUtil.readBool(in)) {
//1.
children = null;
} else {
children = new Vector();
//2.
int numChildren = (int) ExtUtil.readNumeric(in);
//3.
for(int i = 0 ; i < numChildren ; ++i) {
boolean normal = ExtUtil.readBool(in);
if(normal) {
//3.1
TreeElement child = new TreeElement();
child.readExternal(in, pf);
children.addElement(child);
} else {
//3.2
TreeElement child = (TreeElement)ExtUtil.read(in, new ExtWrapTagged(), pf);
children.addElement(child);
}
}
}
//end Jan 22, 2009
dataType = ExtUtil.readInt(in);
relevant = ExtUtil.readBool(in);
required = ExtUtil.readBool(in);
enabled = ExtUtil.readBool(in);
relevantInherited = ExtUtil.readBool(in);
enabledInherited = ExtUtil.readBool(in);
constraint = (Constraint)ExtUtil.read(in, new ExtWrapNullable(Constraint.class), pf);
preloadHandler = ExtUtil.nullIfEmpty(ExtUtil.readString(in));
preloadParams = ExtUtil.nullIfEmpty(ExtUtil.readString(in));
Vector attStrings = ExtUtil.nullIfEmpty((Vector)ExtUtil.read(in, new ExtWrapList(String.class), pf));
setAttributesFromSingleStringVector(attStrings);
}
/*
* (non-Javadoc)
*
* @see org.javarosa.core.services.storage.utilities.Externalizable#writeExternal(java.io.DataOutputStream)
*/
public void writeExternal(DataOutputStream out) throws IOException {
ExtUtil.writeString(out, ExtUtil.emptyIfNull(name));
ExtUtil.writeNumeric(out, multiplicity);
ExtUtil.writeBool(out, repeatable);
ExtUtil.write(out, new ExtWrapNullable(value == null ? null : new ExtWrapTagged(value)));
//Jan 22, 2009 - [email protected]
//old line: ExtUtil.write(out, new ExtWrapList(ExtUtil.emptyIfNull(children)));
//New Child serialization
//1. write null status as boolean
//2. write number of children
//3. for all child in children
//3.1 if child type == TreeElement write boolean true , then serialize directly.
//3.2 if child type != TreeElement, write boolean false, then tagged child
if(children == null) {
//1.
ExtUtil.writeBool(out, false);
} else {
//1.
ExtUtil.writeBool(out, true);
//2.
ExtUtil.writeNumeric(out, children.size());
//3.
Enumeration en = children.elements();
while(en.hasMoreElements()) {
TreeElement child = (TreeElement)en.nextElement();
if(child.getClass() == TreeElement.class) {
//3.1
ExtUtil.writeBool(out, true);
child.writeExternal(out);
} else {
//3.2
ExtUtil.writeBool(out, false);
ExtUtil.write(out, new ExtWrapTagged(child));
}
}
}
//end Jan 22, 2009
ExtUtil.writeNumeric(out, dataType);
ExtUtil.writeBool(out, relevant);
ExtUtil.writeBool(out, required);
ExtUtil.writeBool(out, enabled);
ExtUtil.writeBool(out, relevantInherited);
ExtUtil.writeBool(out, enabledInherited);
ExtUtil.write(out, new ExtWrapNullable(constraint)); //TODO: inefficient for repeats
ExtUtil.writeString(out, ExtUtil.emptyIfNull(preloadHandler));
ExtUtil.writeString(out, ExtUtil.emptyIfNull(preloadParams));
Vector attStrings = getSingleStringAttributeVector();
ExtUtil.write(out, new ExtWrapList(ExtUtil.emptyIfNull(attStrings)));
}
} | javarosa/org.javarosa.core.model/src/org/javarosa/core/model/instance/TreeElement.java | package org.javarosa.core.model.instance;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Enumeration;
import java.util.Vector;
import org.javarosa.core.model.Constants;
import org.javarosa.core.model.FormElementStateListener;
import org.javarosa.core.model.condition.Constraint;
import org.javarosa.core.model.data.IAnswerData;
import org.javarosa.core.model.instance.utils.ITreeVisitor;
import org.javarosa.core.util.externalizable.DeserializationException;
import org.javarosa.core.util.externalizable.ExtUtil;
import org.javarosa.core.util.externalizable.ExtWrapList;
import org.javarosa.core.util.externalizable.ExtWrapNullable;
import org.javarosa.core.util.externalizable.ExtWrapTagged;
import org.javarosa.core.util.externalizable.Externalizable;
import org.javarosa.core.util.externalizable.PrototypeFactory;
/**
* An element of a DataModelTree.
*
* @author Clayton Sims
*
*/
public class TreeElement implements Externalizable {
protected String name; //can be null only for hidden root node
protected int multiplicity;
protected Vector attributes;
public boolean repeatable;
// public boolean isAttribute; for when we support xml attributes as data nodes
private IAnswerData value;
private Vector children;
/* model properties */
public int dataType = Constants.DATATYPE_NULL;
public boolean relevant = true; //do not read this directly unless you know what you're doing!! use isRelevant() if not
public boolean required = false;
public boolean enabled = true; //do not read this directly unless you know what you're doing!! use isEnabled() if not
public Constraint constraint = null;
public String preloadHandler = null;
public String preloadParams = null;
public boolean relevantInherited = true;
public boolean enabledInherited = true;
Vector observers;
public TreeElement () {
this(null, 0);
}
public TreeElement (String name) {
this(name, 0);
}
public TreeElement (String name, int multiplicity) {
this.name = name;
this.multiplicity = multiplicity;
attributes = null;
repeatable = false;
value = null;
children = null;
}
public boolean isLeaf () {
return (children == null);
}
public boolean isChildable () {
return (value == null);
}
public String getName () {
return name;
}
public void setName (String name) {
this.name = name;
}
public int getMult() {
return multiplicity;
}
public void setMult (int multiplicity) {
this.multiplicity = multiplicity;
}
public IAnswerData getValue() {
return value;
}
public void setValue(IAnswerData value) {
if (isLeaf()) {
this.value = value;
} else {
throw new RuntimeException("Can't set data value for node that has children!");
}
}
//may return null! this vector should not be manipulated outside of this class! (namely, don't delete stuff)
public Vector getChildren () {
return children;
}
public int getNumChildren () {
return (children == null ? 0 : children.size());
}
public TreeElement getChild (String name, int multiplicity) {
if (children == null) {
return null;
} else {
for (int i = 0; i < children.size(); i++) {
TreeElement child = (TreeElement)children.elementAt(i);
if (name.equals(child.getName()) && child.getMult() == multiplicity)
return child;
}
return null;
}
}
public Vector getChild (String name) {
return getChild(name, false);
}
public Vector getChild (String name, boolean includeTemplate) {
Vector v = new Vector();
if (children != null) {
for (int i = 0; i < children.size(); i++) {
TreeElement child = (TreeElement)children.elementAt(i);
if (child.getName().equals(name) && (includeTemplate || child.multiplicity != TreeReference.INDEX_TEMPLATE))
v.addElement(child);
}
}
return v;
}
public void addChild (TreeElement child) {
addChild(child, false);
}
public void addChild(TreeElement child, boolean checkDuplicate) {
if (children == null) {
if (isChildable()) {
children = new Vector();
} else {
throw new RuntimeException("Can't add children to node that has data value!");
}
}
if (child.multiplicity == TreeReference.INDEX_UNBOUND) {
throw new RuntimeException("Cannot add child with an unbound index!");
}
if (checkDuplicate) {
TreeElement existingChild = getChild(child.name, child.multiplicity);
if (existingChild != null) {
throw new RuntimeException("Attempted to add duplicate child!");
}
}
//try to keep things in order
int i = children.size();
if (child.getMult() == TreeReference.INDEX_TEMPLATE) {
TreeElement anchor = getChild(child.getName(), 0);
if (anchor != null)
i = children.indexOf(anchor);
} else {
TreeElement anchor = getChild(child.getName(), (child.getMult() == 0 ? TreeReference.INDEX_TEMPLATE : child.getMult() - 1));
if (anchor != null)
i = children.indexOf(anchor) + 1;
}
children.insertElementAt(child, i);
child.setRelevant(isRelevant(), true);
child.setEnabled(isEnabled(), true);
}
public void removeChild(TreeElement child) {
children.removeElement(child);
nullChildren();
}
public void removeChild (String name, int multiplicity) {
TreeElement child = getChild(name, multiplicity);
if (child != null) {
removeChild(child);
}
}
public void removeChildren (String name) {
removeChildren(name, false);
}
public void removeChildren (String name, boolean includeTemplate) {
Vector v = getChild(name, includeTemplate);
for (int i = 0; i < v.size(); i++) {
removeChild((TreeElement)v.elementAt(i));
}
}
public void removeChildAt (int i) {
children.removeElementAt(i);
nullChildren();
}
private void nullChildren () {
if (children.size() == 0)
children = null;
}
public int getChildMultiplicity (String name) {
return getChild(name, false).size();
}
public TreeElement shallowCopy () {
TreeElement newNode = new TreeElement(name, multiplicity);
newNode.repeatable = repeatable;
newNode.dataType = dataType;
newNode.relevant = relevant;
newNode.required = required;
newNode.enabled = enabled;
newNode.constraint = constraint;
newNode.preloadHandler = preloadHandler;
newNode.preloadParams = preloadParams;
newNode.setAttributesFromSingleStringVector(getSingleStringAttributeVector());
if(value != null) {
newNode.value = value.clone();
}
newNode.children = children;
return newNode;
}
public TreeElement deepCopy (boolean includeTemplates) {
TreeElement newNode = shallowCopy();
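		// note: newNode.children is still aliased to this node's own vector at this point, so the
		// addChild() calls below grow the list being iterated; this is the infinite loop fixed in the diff below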
for (int i = 0; i < getNumChildren(); i++) {
TreeElement child = (TreeElement)children.elementAt(i);
if (includeTemplates || child.getMult() != TreeReference.INDEX_TEMPLATE) {
newNode.addChild(child.deepCopy(includeTemplates));
}
}
return newNode;
}
/* ==== MODEL PROPERTIES ==== */
//factoring inheritance rules
public boolean isRelevant () {
return relevantInherited && relevant;
}
//factoring in inheritance rules
public boolean isEnabled () {
return enabledInherited && enabled;
}
/* ==== SPECIAL SETTERS (SETTERS WITH SIDE-EFFECTS) ==== */
public boolean setAnswer (IAnswerData answer) {
if (value != null || answer != null) {
setValue(answer);
alertStateObservers(FormElementStateListener.CHANGE_DATA);
return true;
} else {
return false;
}
}
public void setRequired (boolean required) {
if (this.required != required) {
this.required = required;
alertStateObservers(FormElementStateListener.CHANGE_REQUIRED);
}
}
public void setRelevant (boolean relevant) {
setRelevant(relevant, false);
}
public void setRelevant (boolean relevant, boolean inherited) {
boolean oldRelevancy = isRelevant();
if (inherited) {
this.relevantInherited = relevant;
} else {
this.relevant = relevant;
}
if (isRelevant() != oldRelevancy) {
for (int i = 0; i < getNumChildren(); i++) {
((TreeElement)children.elementAt(i)).setRelevant(isRelevant(), true);
}
alertStateObservers(FormElementStateListener.CHANGE_RELEVANT);
}
}
public void setEnabled (boolean enabled) {
setEnabled(enabled, false);
}
public void setEnabled (boolean enabled, boolean inherited) {
boolean oldEnabled = isEnabled();
if (inherited) {
this.enabledInherited = enabled;
} else {
this.enabled = enabled;
}
if (isEnabled() != oldEnabled) {
for (int i = 0; i < getNumChildren(); i++) {
((TreeElement)children.elementAt(i)).setEnabled(isEnabled(), true);
}
alertStateObservers(FormElementStateListener.CHANGE_ENABLED);
}
}
/* ==== OBSERVER PATTERN ==== */
public void registerStateObserver (FormElementStateListener qsl) {
if (observers == null)
observers = new Vector();
if (!observers.contains(qsl)) {
observers.addElement(qsl);
}
}
public void unregisterStateObserver (FormElementStateListener qsl) {
if (observers != null) {
observers.removeElement(qsl);
if (observers.isEmpty())
observers = null;
}
}
public void unregisterAll () {
observers = null;
}
public void alertStateObservers (int changeFlags) {
if (observers != null) {
for (Enumeration e = observers.elements(); e.hasMoreElements(); )
((FormElementStateListener)e.nextElement()).formElementStateChanged(this, changeFlags);
}
}
/* ==== VISITOR PATTERN ==== */
/**
* Visitor pattern acceptance method.
*
* @param visitor
* The visitor traveling this tree
*/
public void accept(ITreeVisitor visitor) {
visitor.visit(this);
if (children != null) {
Enumeration en = children.elements();
while (en.hasMoreElements()) {
((TreeElement) en.nextElement()).accept(visitor);
}
}
}
/*
* ==== HARD-CODED ATTRIBUTES (delete once we support writable attributes)
* ====
*/
/**
* Returns the number of attributes of this element.
*/
public int getAttributeCount() {
return attributes == null ? 0 : attributes.size ();
}
/**
* get namespace of attribute at 'index' in the vector
* @param index
* @return String
*/
public String getAttributeNamespace (int index) {
return ((String []) attributes.elementAt (index)) [0];
}
/**
* get name of attribute at 'index' in the vector
* @param index
* @return String
*/
public String getAttributeName (int index) {
return ((String []) attributes.elementAt (index)) [1];
}
/**
* get value of attribute at 'index' in the vector
* @param index
* @return String
*/
public String getAttributeValue (int index) {
return ((String []) attributes.elementAt (index)) [2];
}
/**
	 * get the value of the attribute with the given namespace and name
	 * @param namespace
	 * @param name
* @return String
*/
public String getAttributeValue (String namespace, String name) {
for (int i = 0; i < getAttributeCount (); i++) {
if (name.equals (getAttributeName (i))
&& (namespace == null || namespace.equals (getAttributeNamespace(i)))) {
return getAttributeValue (i);
}
}
return null;
}
/**
* Sets the given attribute; a value of null removes the attribute
*
*
* */
public void setAttribute (String namespace, String name, String value) {
if (attributes == null)
attributes = new Vector ();
if (namespace == null)
namespace = "";
for (int i = attributes.size()-1; i >=0; i--){
String[] attribut = (String[]) attributes.elementAt(i);
if (attribut[0].equals(namespace) &&
attribut[1].equals(name)){
if (value == null) {
attributes.removeElementAt(i);
}
else {
attribut[2] = value;
}
return;
}
}
attributes.addElement
(new String [] {namespace, name, value});
}
/**
* A method for producing a vector of single strings - from the current
* attribute vector of string [] arrays.
* @return
*/
public Vector getSingleStringAttributeVector(){
Vector strings = new Vector();
if (attributes == null)
return null;
else{
for(int i =0; i<this.attributes.size();i++){
String [] array = (String [])attributes.elementAt(i);
if (array[0]==null || array[0]=="")
strings.addElement(new String(array[1]+"="+array[2]));
else
strings.addElement(new String(array[0]+":"+array[1]+"="+array[2]));
}
return strings;
}
}
/**
* Method to repopulate the attribute vector from a vector of singleStrings
* @param attStrings
*/
public void setAttributesFromSingleStringVector(Vector attStrings){
Vector stringArrays = new Vector();
if (attStrings == null)
attributes = null;
else{
this.attributes = new Vector();
for(int i =0; i<attStrings.size();i++){
String att = (String)attStrings.elementAt(i);
String [] array = new String [3];
int start = 0;
// get namespace
int pos = att.indexOf(":");
if (pos == -1){
array[0]=null;
start = 0;
}
else{
array[0]=att.substring(start, pos);
start = ++pos;
}
// get attribute name
pos = att.indexOf("=");
array[1]=att.substring(start,pos);
start = ++pos;
array[2]= att.substring(start);
this.setAttribute(array[0], array[1], array[2]);
}
}
}
/* ==== SERIALIZATION ==== */
/* TODO:
*
* this new serialization scheme is kind of lame. ideally, we shouldn't have to sub-class TreeElement at all; we
* should have an API that can seamlessly represent complex data model objects (like weight history or immunizations) as
	 * if they were explicit XML subtrees underneath the parent TreeElement
*
* failing that, we should wrap this scheme in an ExternalizableWrapper
*/
/*
* (non-Javadoc)
*
* @see org.javarosa.core.services.storage.utilities.Externalizable#readExternal(java.io.DataInputStream)
*/
public void readExternal(DataInputStream in, PrototypeFactory pf) throws IOException, DeserializationException {
name = ExtUtil.nullIfEmpty(ExtUtil.readString(in));
multiplicity = ExtUtil.readInt(in);
repeatable = ExtUtil.readBool(in);
value = (IAnswerData)ExtUtil.read(in, new ExtWrapNullable(new ExtWrapTagged()), pf);
//children = ExtUtil.nullIfEmpty((Vector)ExtUtil.read(in, new ExtWrapList(TreeElement.class), pf));
//Jan 22, 2009 - [email protected]
//old line: children = ExtUtil.nullIfEmpty((Vector)ExtUtil.read(in, new ExtWrapList(TreeElement.class), pf));
//New Child deserialization
//1. read null status as boolean
//2. read number of children
//3. for i < number of children
//3.1 if read boolean true , then create TreeElement and deserialize directly.
//3.2 if read boolean false then create tagged element and deserialize child
if(!ExtUtil.readBool(in)) {
//1.
children = null;
} else {
children = new Vector();
//2.
int numChildren = (int) ExtUtil.readNumeric(in);
//3.
for(int i = 0 ; i < numChildren ; ++i) {
boolean normal = ExtUtil.readBool(in);
if(normal) {
//3.1
TreeElement child = new TreeElement();
child.readExternal(in, pf);
children.addElement(child);
} else {
//3.2
TreeElement child = (TreeElement)ExtUtil.read(in, new ExtWrapTagged(), pf);
children.addElement(child);
}
}
}
//end Jan 22, 2009
dataType = ExtUtil.readInt(in);
relevant = ExtUtil.readBool(in);
required = ExtUtil.readBool(in);
enabled = ExtUtil.readBool(in);
relevantInherited = ExtUtil.readBool(in);
enabledInherited = ExtUtil.readBool(in);
constraint = (Constraint)ExtUtil.read(in, new ExtWrapNullable(Constraint.class), pf);
preloadHandler = ExtUtil.nullIfEmpty(ExtUtil.readString(in));
preloadParams = ExtUtil.nullIfEmpty(ExtUtil.readString(in));
Vector attStrings = ExtUtil.nullIfEmpty((Vector)ExtUtil.read(in, new ExtWrapList(String.class), pf));
setAttributesFromSingleStringVector(attStrings);
}
/*
* (non-Javadoc)
*
* @see org.javarosa.core.services.storage.utilities.Externalizable#writeExternal(java.io.DataOutputStream)
*/
public void writeExternal(DataOutputStream out) throws IOException {
ExtUtil.writeString(out, ExtUtil.emptyIfNull(name));
ExtUtil.writeNumeric(out, multiplicity);
ExtUtil.writeBool(out, repeatable);
ExtUtil.write(out, new ExtWrapNullable(value == null ? null : new ExtWrapTagged(value)));
//Jan 22, 2009 - [email protected]
//old line: ExtUtil.write(out, new ExtWrapList(ExtUtil.emptyIfNull(children)));
//New Child serialization
//1. write null status as boolean
//2. write number of children
//3. for all child in children
//3.1 if child type == TreeElement write boolean true , then serialize directly.
//3.2 if child type != TreeElement, write boolean false, then tagged child
if(children == null) {
//1.
ExtUtil.writeBool(out, false);
} else {
//1.
ExtUtil.writeBool(out, true);
//2.
ExtUtil.writeNumeric(out, children.size());
//3.
Enumeration en = children.elements();
while(en.hasMoreElements()) {
TreeElement child = (TreeElement)en.nextElement();
if(child.getClass() == TreeElement.class) {
//3.1
ExtUtil.writeBool(out, true);
child.writeExternal(out);
} else {
//3.2
ExtUtil.writeBool(out, false);
ExtUtil.write(out, new ExtWrapTagged(child));
}
}
}
//end Jan 22, 2009
ExtUtil.writeNumeric(out, dataType);
ExtUtil.writeBool(out, relevant);
ExtUtil.writeBool(out, required);
ExtUtil.writeBool(out, enabled);
ExtUtil.writeBool(out, relevantInherited);
ExtUtil.writeBool(out, enabledInherited);
ExtUtil.write(out, new ExtWrapNullable(constraint)); //TODO: inefficient for repeats
ExtUtil.writeString(out, ExtUtil.emptyIfNull(preloadHandler));
ExtUtil.writeString(out, ExtUtil.emptyIfNull(preloadParams));
Vector attStrings = getSingleStringAttributeVector();
ExtUtil.write(out, new ExtWrapList(ExtUtil.emptyIfNull(attStrings)));
}
} | [r2167] fix infinite loop!
| javarosa/org.javarosa.core.model/src/org/javarosa/core/model/instance/TreeElement.java | [r2167] fix infinite loop! | <ide><path>avarosa/org.javarosa.core.model/src/org/javarosa/core/model/instance/TreeElement.java
<ide> public TreeElement deepCopy (boolean includeTemplates) {
<ide> TreeElement newNode = shallowCopy();
<ide>
<add> newNode.children = null;
<ide> for (int i = 0; i < getNumChildren(); i++) {
<ide> TreeElement child = (TreeElement)children.elementAt(i);
<ide> if (includeTemplates || child.getMult() != TreeReference.INDEX_TEMPLATE) { |
|
Java | agpl-3.0 | 13f6f1657b08053ffb026e500d56801eb6a430c5 | 0 | phenotips/patient-network,phenotips/patient-network,phenotips/patient-network,phenotips/patient-network | /*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see http://www.gnu.org/licenses/
*/
package org.phenotips.matchingnotification.storage.internal;
import org.phenotips.matchingnotification.match.PatientMatch;
import org.phenotips.matchingnotification.storage.MatchStorageManager;
import org.xwiki.component.annotation.Component;
import java.util.Collections;
import java.util.List;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.apache.commons.lang3.StringUtils;
import org.hibernate.Criteria;
import org.hibernate.HibernateException;
import org.hibernate.Session;
import org.hibernate.Transaction;
import org.hibernate.criterion.Criterion;
import org.hibernate.criterion.Restrictions;
import org.slf4j.Logger;
import com.xpn.xwiki.store.hibernate.HibernateSessionFactory;
/**
* @version $Id$
*/
@Component
@Singleton
public class DefaultMatchStorageManager implements MatchStorageManager
{
/** Handles persistence. */
@Inject
private HibernateSessionFactory sessionFactory;
/** Logging helper object. */
@Inject
private Logger logger;
@Override
public void saveMatches(List<PatientMatch> matches) {
Session session = this.sessionFactory.getSessionFactory().openSession();
Transaction t = session.beginTransaction();
try {
for (PatientMatch match : matches) {
session.save(match);
}
t.commit();
} catch (HibernateException ex) {
this.logger.error("ERROR storing matches: [{}]", ex);
if (t != null) {
t.rollback();
}
throw ex;
} finally {
session.close();
}
}
@Override
public List<PatientMatch> loadAllMatches() {
return this.loadMatchesByCriterion(null);
}
@Override
public List<PatientMatch> loadMatchesByIds(List<Long> matchesIds) {
if (matchesIds != null && matchesIds.size() > 0) {
Criterion criterion = Restrictions.in("id", matchesIds.toArray());
return this.loadMatchesByCriterion(criterion);
} else {
return Collections.emptyList();
}
}
@Override
public List<PatientMatch> loadMatchesByReferencePatientId(String patientId)
{
if (StringUtils.isNotEmpty(patientId)) {
Criterion criterion = Restrictions.eq("patientId", patientId);
return this.loadMatchesByCriterion(criterion);
} else {
return Collections.emptyList();
}
}
@SuppressWarnings("unchecked")
private List<PatientMatch> loadMatchesByCriterion(Criterion criterion)
{
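        // returns null (not an empty list) if the query fails; the HibernateException is logged and swallowed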
List<PatientMatch> matches = null;
Session session = this.sessionFactory.getSessionFactory().openSession();
try {
Criteria criteria = session.createCriteria(PatientMatch.class);
if (criterion != null) {
criteria.add(criterion);
}
matches = criteria.list();
} catch (HibernateException ex) {
this.logger.error("loadMatchesByCriterion. Criterion: {}, ERROR: [{}]", criterion, ex);
} finally {
session.close();
}
return matches;
}
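    // All public load methods funnel through loadMatchesByCriterion(): a null criterion loads every
    // match, Restrictions.in("id", ...) filters by match ids, and Restrictions.eq("patientId", ...)
    // filters by reference patient id. For example, loading a single match by its id amounts to
    // loadMatchesByCriterion(Restrictions.in("id", new Long[] { matchId })); illustrative only,
    // callers should keep using the public wrappers above.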
// TODO remove, for debug.
@Override
public void clearMatches() {
List<PatientMatch> matches = loadAllMatches();
Session session = this.sessionFactory.getSessionFactory().openSession();
Transaction t = session.beginTransaction();
try {
session.clear();
for (PatientMatch match : matches) {
session.delete(match);
}
t.commit();
} catch (HibernateException ex) {
this.logger.error("ERROR deleting matches", ex);
if (t != null) {
t.rollback();
}
throw ex;
} finally {
session.close();
}
}
}
| matching-notification-api/src/main/java/org/phenotips/matchingnotification/storage/internal/DefaultMatchStorageManager.java | /*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see http://www.gnu.org/licenses/
*/
package org.phenotips.matchingnotification.storage.internal;
import org.phenotips.matchingnotification.match.PatientMatch;
import org.phenotips.matchingnotification.storage.MatchStorageManager;
import org.xwiki.component.annotation.Component;
import java.util.Collections;
import java.util.List;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.apache.commons.lang3.StringUtils;
import org.hibernate.Criteria;
import org.hibernate.HibernateException;
import org.hibernate.Session;
import org.hibernate.Transaction;
import org.hibernate.criterion.Criterion;
import org.hibernate.criterion.Restrictions;
import org.slf4j.Logger;
import com.xpn.xwiki.store.hibernate.HibernateSessionFactory;
/**
* @version $Id$
*/
@Component
@Singleton
public class DefaultMatchStorageManager implements MatchStorageManager
{
/** Handles persistence. */
@Inject
private HibernateSessionFactory sessionFactory;
/** Logging helper object. */
@Inject
private Logger logger;
@Override
public void saveMatches(List<PatientMatch> matches) {
Session session = this.sessionFactory.getSessionFactory().openSession();
Transaction t = session.beginTransaction();
try {
for (PatientMatch match : matches) {
session.save(match);
}
t.commit();
} catch (HibernateException ex) {
this.logger.error("ERROR storing matches: [{}]", ex);
if (t != null) {
t.rollback();
}
throw ex;
} finally {
session.close();
}
}
@Override
public List<PatientMatch> loadAllMatches() {
return loadMatchesByIds(null);
}
@SuppressWarnings("unchecked")
@Override
public List<PatientMatch> loadMatchesByIds(List<Long> matchesIds) {
List<PatientMatch> matches = null;
Session session = this.sessionFactory.getSessionFactory().openSession();
try {
Criteria criteria = session.createCriteria(PatientMatch.class);
if (matchesIds != null && matchesIds.size() > 0) {
// The string "patientId" depends on the implementation of PatientMatch. But I felt that
// making it more general is excessive.
criteria.add(Restrictions.in("id", matchesIds.toArray()));
}
matches = criteria.list();
} catch (HibernateException ex) {
this.logger.error("loadAllMatches: ERROR: [{}]", ex);
} finally {
session.close();
}
return matches;
}
@Override
public List<PatientMatch> loadMatchesByReferencePatientId(String patientId)
{
if (StringUtils.isNotEmpty(patientId)) {
Criterion criterion = Restrictions.eq("patientId", patientId);
return this.loadMatchesByCriterion(criterion);
} else {
return Collections.emptyList();
}
}
@SuppressWarnings("unchecked")
private List<PatientMatch> loadMatchesByCriterion(Criterion criterion)
{
List<PatientMatch> matches = null;
Session session = this.sessionFactory.getSessionFactory().openSession();
try {
Criteria criteria = session.createCriteria(PatientMatch.class);
if (criterion != null) {
criteria.add(criterion);
}
matches = criteria.list();
} catch (HibernateException ex) {
this.logger.error("loadMatchesByCriterion. Criterion: {}, ERROR: [{}]", criterion, ex);
} finally {
session.close();
}
return matches;
}
// TODO remove, for debug.
@Override
public void clearMatches() {
List<PatientMatch> matches = loadAllMatches();
Session session = this.sessionFactory.getSessionFactory().openSession();
Transaction t = session.beginTransaction();
try {
session.clear();
for (PatientMatch match : matches) {
session.delete(match);
}
t.commit();
} catch (HibernateException ex) {
this.logger.error("ERROR deleting matches", ex);
if (t != null) {
t.rollback();
}
throw ex;
} finally {
session.close();
}
}
}
| PN-123-matching-notification
Rewrote DefaultMatchStorageManager.loadAllMatches() and loadMatchesByIds().
| matching-notification-api/src/main/java/org/phenotips/matchingnotification/storage/internal/DefaultMatchStorageManager.java | PN-123-matching-notification Rewrote DefaultMatchStorageManager.loadAllMatches() and loadMatchesByIds(). | <ide><path>atching-notification-api/src/main/java/org/phenotips/matchingnotification/storage/internal/DefaultMatchStorageManager.java
<ide>
<ide> @Override
<ide> public List<PatientMatch> loadAllMatches() {
<del> return loadMatchesByIds(null);
<add> return this.loadMatchesByCriterion(null);
<ide> }
<ide>
<del> @SuppressWarnings("unchecked")
<ide> @Override
<ide> public List<PatientMatch> loadMatchesByIds(List<Long> matchesIds) {
<del> List<PatientMatch> matches = null;
<del> Session session = this.sessionFactory.getSessionFactory().openSession();
<del> try {
<del> Criteria criteria = session.createCriteria(PatientMatch.class);
<del> if (matchesIds != null && matchesIds.size() > 0) {
<del> // The string "patientId" depends on the implementation of PatientMatch. But I felt that
<del> // making it more general is excessive.
<del> criteria.add(Restrictions.in("id", matchesIds.toArray()));
<del> }
<del>
<del> matches = criteria.list();
<del> } catch (HibernateException ex) {
<del> this.logger.error("loadAllMatches: ERROR: [{}]", ex);
<del> } finally {
<del> session.close();
<add> if (matchesIds != null && matchesIds.size() > 0) {
<add> Criterion criterion = Restrictions.in("id", matchesIds.toArray());
<add> return this.loadMatchesByCriterion(criterion);
<add> } else {
<add> return Collections.emptyList();
<ide> }
<del> return matches;
<ide> }
<ide>
<ide> @Override |
|
Java | apache-2.0 | 54abac70b4e27e919c1960f8d1b924c6d0a1af8d | 0 | metaborg/spoofax,metaborg/spoofax,metaborg/spoofax,metaborg/spoofax,metaborg/spoofax-eclipse | package org.strategoxt.imp.runtime.parser.ast;
import static java.lang.Math.max;
import static org.spoofax.jsglr.Term.applAt;
import static org.spoofax.jsglr.Term.asAppl;
import static org.spoofax.jsglr.Term.intAt;
import static org.spoofax.jsglr.Term.isAppl;
import static org.spoofax.jsglr.Term.isInt;
import static org.spoofax.jsglr.Term.termAt;
import static org.strategoxt.imp.runtime.parser.tokens.TokenKind.TK_ERROR;
import static org.strategoxt.imp.runtime.parser.tokens.TokenKind.TK_LAYOUT;
import static org.strategoxt.imp.runtime.parser.tokens.TokenKindManager.isKeywordChar;
import java.util.ArrayList;
import lpg.runtime.IToken;
import lpg.runtime.PrsStream;
import org.spoofax.interpreter.terms.IStrategoList;
import org.spoofax.interpreter.terms.IStrategoTerm;
import org.spoofax.jsglr.RecoveryConnector;
import org.strategoxt.imp.runtime.Debug;
import org.strategoxt.imp.runtime.Environment;
import org.strategoxt.imp.runtime.parser.ParseErrorHandler;
import org.strategoxt.imp.runtime.parser.tokens.KeywordRecognizer;
import org.strategoxt.imp.runtime.parser.tokens.SGLRTokenizer;
import org.strategoxt.imp.runtime.parser.tokens.TokenKindManager;
import aterm.ATerm;
import aterm.ATermAppl;
import aterm.ATermInt;
import aterm.ATermList;
import aterm.pure.ATermListImpl;
/**
* Implodes an Asfix tree to AstNode nodes and IToken tokens.
*
* @author Lennart Kats <L.C.L.Kats add tudelft.nl>
*/
public class AsfixImploder {
private static final int EXPECTED_NODE_CHILDREN = 5;
protected static final int PARSE_TREE = 0;
protected static final int APPL_PROD = 0;
protected static final int APPL_CONTENTS = 1;
protected static final int PROD_LHS = 0;
protected static final int PROD_RHS = 1;
protected static final int PROD_ATTRS = 2;
private static final int NONE = -1;
protected final AstNodeFactory factory = new AstNodeFactory();
private final ProductionAttributeReader reader = new ProductionAttributeReader();
private final TokenKindManager tokenManager;
protected SGLRTokenizer tokenizer;
/** Character offset for the current implosion. */
protected int offset;
private int nonMatchingOffset = NONE;
private char nonMatchingChar, nonMatchingCharExpected, prevChar;
protected boolean inLexicalContext;
public AsfixImploder(TokenKindManager tokenManager) {
this.tokenManager = tokenManager;
}
public AstNode implode(ATerm asfix, SGLRTokenizer tokenizer) {
this.tokenizer = tokenizer;
// TODO: Return null if imploded tree has null constructor??
if (tokenizer.getCachedAst() != null)
return tokenizer.getCachedAst();
Debug.startTimer();
if (!(asfix instanceof ATermAppl || ((ATermAppl) asfix).getName().equals("parsetree")))
throw new IllegalArgumentException("Parse tree expected");
if (offset != 0 || tokenizer.getStartOffset() != 0)
throw new IllegalStateException("Race condition in AsfixImploder (" + tokenizer.getLexStream().getFileName() + "; might be caused by stack overflow)");
ATerm top = (ATerm) asfix.getChildAt(PARSE_TREE);
AstNode result;
offset = 0;
inLexicalContext = false;
try {
result = implodeAppl(top);
} finally {
tokenizer.endStream();
offset = 0;
nonMatchingOffset = NONE;
}
if (Debug.ENABLED) {
Debug.stopTimer("Parse tree imploded");
// Disabled; printing big trees causes delays
// Debug.log("Parsed " + result.toString());
}
tokenizer.setCachedAst(result);
return result;
}
/**
* Implode any appl(_, _).
*/
protected AstNode implodeAppl(ATerm term) {
// Note that this method significantly impacts our stack usage;
// method extraction should be carefully considered...
ATermAppl appl = resolveAmbiguities(term);
if (appl.getName().equals("amb"))
return implodeAmbAppl(appl);
ATermAppl prod = termAt(appl, APPL_PROD);
ATermList lhs = termAt(prod, PROD_LHS);
ATermAppl rhs = termAt(prod, PROD_RHS);
ATermAppl attrs = termAt(prod, PROD_ATTRS);
ATermList contents = termAt(appl, APPL_CONTENTS);
IToken prevToken = tokenizer.currentToken();
int lastOffset = offset;
// Enter lexical context if this is a lex node
boolean lexicalStart = !inLexicalContext && AsfixAnalyzer.isLexicalNode(rhs);
if (lexicalStart) inLexicalContext = true;
if (!inLexicalContext && "sort".equals(rhs.getName()) && lhs.getLength() == 1 && termAt(contents, 0).getType() == ATerm.INT) {
return setAnnos(createIntTerminal(contents, rhs), appl.getAnnotations());
}
boolean isList = !inLexicalContext && AsfixAnalyzer.isList(rhs);
boolean isVar = !inLexicalContext && !isList && AsfixAnalyzer.isVariableNode(rhs);
if (isVar) inLexicalContext = true;
ArrayList<AstNode> children = null;
if (!inLexicalContext)
children = new ArrayList<AstNode>(max(EXPECTED_NODE_CHILDREN, contents.getChildCount()));
// Recurse
for (int i = 0; i < contents.getLength(); i++) {
ATerm child = contents.elementAt(i);
if (isInt(child)) {
consumeLexicalChar((ATermInt) child);
} else {
AstNode childNode = implodeAppl(child);
if (childNode != null) children.add(childNode);
}
}
if (lexicalStart || isVar) {
return setAnnos(createStringTerminal(lhs, rhs, attrs), appl.getAnnotations());
} else if (inLexicalContext) {
createLayoutToken(rhs, lastOffset);
return null; // don't create tokens inside lexical context; just create one big token at the top
} else {
return setAnnos(createNodeOrInjection(lhs, rhs, attrs, prevToken, children, isList), appl.getAnnotations());
}
}
protected AmbAstNode implodeAmbAppl(ATermAppl node) {
final ATermListImpl ambs = termAt(node, 0);
final ArrayList<AstNode> results = new ArrayList<AstNode>();
final int oldOffset = offset;
final int oldBeginOffset = tokenizer.getStartOffset();
final boolean oldLexicalContext = inLexicalContext;
for (ATerm amb : ambs) {
// Restore lexical state for each branch
offset = oldOffset;
tokenizer.setStartOffset(oldBeginOffset);
inLexicalContext = oldLexicalContext;
AstNode result = implodeAppl(amb);
if (result == null)
return null;
results.add(result);
}
return new AmbAstNode(results);
}
private AstNode setAnnos(AstNode node, ATermList annos) {
if (node != null && annos != null && !annos.isEmpty()) {
IStrategoTerm termAnnos = Environment.getATermConverter().convert(annos);
node.setAnnotations((IStrategoList) termAnnos);
}
return node;
}
private AstNode createStringTerminal(ATermList lhs, ATermAppl rhs, ATermAppl attrs) {
inLexicalContext = false;
String sort = reader.getSort(rhs);
IToken token = tokenizer.makeToken(offset, tokenManager.getTokenKind(lhs, rhs), sort != null);
if (sort == null) return null;
// Debug.log("Creating node ", sort, " from ", SGLRTokenizer.dumpToString(token));
AstNode result = factory.createStringTerminal(getPaddedLexicalValue(attrs, token), sort, token);
String constructor = reader.getMetaVarConstructor(rhs);
if (constructor != null) {
ArrayList<AstNode> children = new ArrayList<AstNode>(1);
children.add(result);
result = factory.createNonTerminal(sort, constructor, token, token, children);
}
return result;
}
private IntAstNode createIntTerminal(ATermList contents, ATermAppl rhs) {
IToken token = tokenizer.makeToken(offset, tokenManager.getTokenKind(contents, rhs), true);
String sort = reader.getSort(rhs);
int value = intAt(contents, 0);
return factory.createIntTerminal(sort, token, value);
}
private AstNode createNodeOrInjection(ATermList lhs, ATermAppl rhs, ATermAppl attrs,
IToken prevToken, ArrayList<AstNode> children, boolean isList) {
String constructor = reader.getConsAttribute(attrs);
String sort = reader.getSort(rhs);
if(constructor == null) {
if (isList) {
return createNode(attrs, sort, null, prevToken, children, true);
}
ATerm ast = reader.getAstAttribute(attrs);
if (ast != null) {
return createAstNonTerminal(rhs, prevToken, children, ast);
} else if (children.size() == 0) {
return createNode(attrs, sort, "None", prevToken, children, false);
} else if ("opt".equals(applAt(rhs, 0).getName())) {
assert children.size() == 1;
AstNode child = children.get(0);
return new AstNode(sort, child.getLeftIToken(), child.getRightIToken(), "Some", children);
} else {
// Injection
assert children.size() == 1;
return children.get(0);
}
} else {
tokenizer.makeToken(offset, tokenManager.getTokenKind(lhs, rhs));
return createNode(attrs, sort, constructor, prevToken, children, isList);
}
}
/** Implode a context-free node. */
private AstNode createNode(ATermAppl attrs, String sort, String constructor, IToken prevToken,
ArrayList<AstNode> children, boolean isList) {
IToken left = getStartToken(prevToken);
IToken right = getEndToken(left, tokenizer.currentToken());
/*
if (Debug.ENABLED) {
String name = isList ? "list" : sort;
Debug.log("Creating node ", name, ":", constructor, AstNode.getSorts(children), " from ", SGLRTokenizer.dumpToString(left, right));
}
*/
if (isList) {
return factory.createList(sort, left, right, children);
} else if (constructor == null && children.size() == 1 && children.get(0).getSort() == AstNode.STRING_SORT) {
// Child node was a <string> node (rare case); unpack it and create a new terminal
assert left == right && children.get(0).getChildren().size() == 0;
return factory.createStringTerminal(getPaddedLexicalValue(attrs, left), sort, left);
} else {
return factory.createNonTerminal(sort, constructor, left, right, children);
}
}
/**
* Gets the padded lexical value for {indentpadding} lexicals, or returns null.
*/
private String getPaddedLexicalValue(ATermAppl attrs, IToken startToken) {
if (reader.isIndentPaddingLexical(attrs)) {
char[] inputChars = tokenizer.getLexStream().getInputChars();
int lineStart = startToken.getStartOffset() - 1;
if (lineStart < 0) return null;
while (lineStart >= 0) {
char c = inputChars[lineStart--];
if (c == '\n' || c == '\r') {
lineStart++;
break;
}
}
StringBuilder result = new StringBuilder();
result.append(inputChars, lineStart, startToken.getStartOffset() - lineStart - 1);
for (int i = 0; i < result.length(); i++) {
char c = result.charAt(i);
if (c != ' ' && c != '\t') result.setCharAt(i, ' ');
}
result.append(startToken.toString());
return result.toString();
} else {
return null; // lazily load token string value
}
}
/** Implode a context-free node with an {ast} annotation. */
private AstNode createAstNonTerminal(ATermAppl rhs, IToken prevToken, ArrayList<AstNode> children, ATerm ast) {
IToken left = getStartToken(prevToken);
IToken right = getEndToken(left, tokenizer.currentToken());
AstAnnoImploder imploder = new AstAnnoImploder(factory, children, left, right);
return imploder.implode(ast, reader.getSort(rhs));
}
/**
* Resolve or ignore any ambiguities in the parse tree.
*/
protected ATermAppl resolveAmbiguities(final ATerm node) {
if (!"amb".equals(((ATermAppl) node).getName()))
return (ATermAppl) node;
final ATermListImpl ambs = termAt(node, 0);
ATermAppl lastNonAvoid = null;
ATermAppl firstOption = null;
boolean multipleNonAvoids = false;
alts:
for (int i = 0; i < ambs.getLength(); i++) {
ATermAppl prod = resolveAmbiguities(termAt(ambs, i));
if (firstOption == null) firstOption = prod;
ATermAppl appl = termAt(prod, APPL_PROD);
ATermAppl attrs = termAt(appl, PROD_ATTRS);
if ("attrs".equals(attrs.getName())) {
ATermList attrList = termAt(attrs, 0);
for (int j = 0; j < attrList.getLength(); j++) {
ATerm attr = termAt(attrList, j);
if (isAppl(attr) && "prefer".equals(asAppl(attr).getName())) {
return prod;
} else if (isAppl(attr) && "avoid".equals(asAppl(attr).getName())) {
continue alts;
}
}
if (lastNonAvoid == null) {
lastNonAvoid = prod;
} else {
multipleNonAvoids = true;
}
}
}
if (!multipleNonAvoids) {
return lastNonAvoid != null ? lastNonAvoid : firstOption;
} else {
if (Debug.ENABLED && !inLexicalContext) reportUnresolvedAmb(ambs);
return firstOption;
}
}
private static void reportUnresolvedAmb(ATermList ambs) {
Debug.log("Ambiguity found during implosion: ");
for (ATerm amb : ambs) {
String ambString = amb.toString();
if (ambString.length() > 1000) ambString = ambString.substring(0, 1000) + "...";
Debug.log(" amb: ", ambString);
}
}
/** Get the token after the previous node's ending token, or null if N/A. */
private IToken getStartToken(IToken prevToken) {
PrsStream parseStream = tokenizer.getParseStream();
if (prevToken == null) {
return parseStream.getSize() == 0 ? null
: parseStream.getTokenAt(0);
} else {
int index = prevToken.getTokenIndex();
if (parseStream.getSize() - index <= 1) {
// Create new empty token
// HACK: Assume TK_LAYOUT kind for empty tokens in AST nodes
return tokenizer.makeToken(offset, TK_LAYOUT, true);
} else {
return parseStream.getTokenAt(index + 1);
}
}
}
/** Get the last no-layout token for an AST node. */
private IToken getEndToken(IToken startToken, IToken lastToken) {
PrsStream parseStream = tokenizer.getParseStream();
int begin = startToken.getTokenIndex();
for (int i = lastToken.getTokenIndex(); i > begin; i--) {
lastToken = parseStream.getTokenAt(i);
if (lastToken.getKind() != TK_LAYOUT.ordinal()
|| lastToken.getStartOffset() == lastToken.getEndOffset()-1)
break;
}
return lastToken;
}
/** Consume a character of a lexical terminal. */
protected final void consumeLexicalChar(ATermInt character) {
char[] inputChars = tokenizer.getLexStream().getInputChars();
if (offset >= inputChars.length) {
if (nonMatchingOffset != NONE) {
Environment.logException(new ImploderException("Character in parse tree after end of input stream: "
+ (char) character.getInt()
+ " - may be caused by unexcepted character in parse tree at position "
+ nonMatchingChar + ": " + nonMatchingChar + " instead of "
+ nonMatchingCharExpected));
}
// UNDONE: Strict lexical stream checking
// throw new ImploderException("Character in parse tree after end of input stream: " + (char) character.getInt());
// a forced reduction may have added some extra characters to the tree;
inputChars[inputChars.length - 1] = ParseErrorHandler.UNEXPECTED_EOF_CHAR;
return;
}
char parsedChar = (char) character.getInt();
char inputChar = inputChars[offset];
if (parsedChar != inputChar) {
if (RecoveryConnector.isLayoutCharacter(parsedChar)) {
// Remember that the parser skipped the current character
// for later error reporting. (Cannot modify the immutable
// parse tree here; changing the original stream instead.)
inputChars[offset] = ParseErrorHandler.SKIPPED_CHAR;
createSkippedToken(inputChars, inputChar);
offset++;
} else {
// UNDONE: Strict lexical stream checking
// throw new IllegalStateException("Character from asfix stream (" + parsedChar
// + ") must be in lex stream (" + inputChar + ")");
// instead, we allow the non-matching character for now, and hope
// we can pick up the right track later
// TODO: better way to report skipped fragments in the parser
// this isn't 100% reliable
if (nonMatchingOffset == NONE) {
nonMatchingOffset = offset;
nonMatchingChar = parsedChar;
nonMatchingCharExpected = inputChar;
}
inputChars[offset] = ParseErrorHandler.SKIPPED_CHAR;
}
} else {
offset++;
}
prevChar = inputChar;
}
/**
* Creates an artificial token at keyword boundaries
* inside skipped regions of code.
* Required for keyword highlighting with {@link KeywordRecognizer}.
*/
private void createSkippedToken(char[] inputChars, char inputChar) {
boolean isInputKeywordChar = isKeywordChar(inputChar);
if (offset > 0) {
if ((isInputKeywordChar && !isKeywordChar(prevChar))
|| (!isInputKeywordChar && isKeywordChar(prevChar))) {
tokenizer.makeToken(offset - 1, TK_ERROR, false);
}
}
if (offset + 1 < inputChars.length) {
char nextChar = inputChars[offset + 1];
if ((isInputKeywordChar && !isKeywordChar(nextChar))
|| (!isInputKeywordChar && isKeywordChar(nextChar))) {
tokenizer.makeToken(offset + 1, TK_ERROR, false);
}
}
}
/**
* Creates an artificial token for every water-based recovery
* and for comments within layout.
*/
private void createLayoutToken(ATermAppl rhs, int lastOffset) {
// Create separate tokens for >1 char layout lexicals (e.g., comments)
if (offset > lastOffset + 1 && AsfixAnalyzer.isLexLayout(rhs)) {
tokenizer.makeToken(lastOffset, TK_LAYOUT, false);
tokenizer.makeToken(offset, TK_LAYOUT, false);
} else {
String sort = reader.getSort(rhs);
if ("WATERTOKEN".equals(sort) || "WATERTOKENSEPARATOR".equals(sort)) {
tokenizer.makeToken(lastOffset, TK_LAYOUT, false);
tokenizer.makeToken(offset, TK_ERROR, false);
}
}
}
}
| org.strategoxt.imp.runtime/src/org/strategoxt/imp/runtime/parser/ast/AsfixImploder.java | package org.strategoxt.imp.runtime.parser.ast;
import static java.lang.Math.max;
import static org.spoofax.jsglr.Term.applAt;
import static org.spoofax.jsglr.Term.asAppl;
import static org.spoofax.jsglr.Term.intAt;
import static org.spoofax.jsglr.Term.isAppl;
import static org.spoofax.jsglr.Term.isInt;
import static org.spoofax.jsglr.Term.termAt;
import static org.strategoxt.imp.runtime.parser.tokens.TokenKind.TK_ERROR;
import static org.strategoxt.imp.runtime.parser.tokens.TokenKind.TK_LAYOUT;
import static org.strategoxt.imp.runtime.parser.tokens.TokenKindManager.isKeywordChar;
import java.util.ArrayList;
import lpg.runtime.IToken;
import lpg.runtime.PrsStream;
import org.spoofax.interpreter.terms.IStrategoList;
import org.spoofax.interpreter.terms.IStrategoTerm;
import org.spoofax.jsglr.RecoveryConnector;
import org.strategoxt.imp.runtime.Debug;
import org.strategoxt.imp.runtime.Environment;
import org.strategoxt.imp.runtime.parser.ParseErrorHandler;
import org.strategoxt.imp.runtime.parser.tokens.KeywordRecognizer;
import org.strategoxt.imp.runtime.parser.tokens.SGLRTokenizer;
import org.strategoxt.imp.runtime.parser.tokens.TokenKindManager;
import aterm.ATerm;
import aterm.ATermAppl;
import aterm.ATermInt;
import aterm.ATermList;
import aterm.pure.ATermListImpl;
/**
* Implodes an Asfix tree to AstNode nodes and IToken tokens.
*
* @author Lennart Kats <L.C.L.Kats add tudelft.nl>
*/
public class AsfixImploder {
private static final int EXPECTED_NODE_CHILDREN = 5;
protected static final int PARSE_TREE = 0;
protected static final int APPL_PROD = 0;
protected static final int APPL_CONTENTS = 1;
protected static final int PROD_LHS = 0;
protected static final int PROD_RHS = 1;
protected static final int PROD_ATTRS = 2;
private static final int NONE = -1;
protected final AstNodeFactory factory = new AstNodeFactory();
private final ProductionAttributeReader reader = new ProductionAttributeReader();
private final TokenKindManager tokenManager;
protected SGLRTokenizer tokenizer;
/** Character offset for the current implosion. */
protected int offset;
private int nonMatchingOffset = NONE;
private char nonMatchingChar, nonMatchingCharExpected, prevChar;
protected boolean inLexicalContext;
public AsfixImploder(TokenKindManager tokenManager) {
this.tokenManager = tokenManager;
}
public AstNode implode(ATerm asfix, SGLRTokenizer tokenizer) {
this.tokenizer = tokenizer;
// TODO: Return null if imploded tree has null constructor??
if (tokenizer.getCachedAst() != null)
return tokenizer.getCachedAst();
Debug.startTimer();
if (!(asfix instanceof ATermAppl || ((ATermAppl) asfix).getName().equals("parsetree")))
throw new IllegalArgumentException("Parse tree expected");
if (offset != 0 || tokenizer.getStartOffset() != 0)
throw new IllegalStateException("Race condition in AsfixImploder (" + tokenizer.getLexStream().getFileName() + "; might be caused by stack overflow)");
ATerm top = (ATerm) asfix.getChildAt(PARSE_TREE);
AstNode result;
offset = 0;
inLexicalContext = false;
try {
result = implodeAppl(top);
} finally {
tokenizer.endStream();
offset = 0;
nonMatchingOffset = NONE;
}
if (Debug.ENABLED) {
Debug.stopTimer("Parse tree imploded");
// Disabled; printing big trees causes delays
// Debug.log("Parsed " + result.toString());
}
tokenizer.setCachedAst(result);
return result;
}
/**
* Implode any appl(_, _).
*/
protected AstNode implodeAppl(ATerm term) {
// Note that this method significantly impacts our stack usage;
// method extraction should be carefully considered...
ATermAppl appl = resolveAmbiguities(term);
if (appl.getName().equals("amb"))
return implodeAmbAppl(appl);
ATermAppl prod = termAt(appl, APPL_PROD);
ATermList lhs = termAt(prod, PROD_LHS);
ATermAppl rhs = termAt(prod, PROD_RHS);
ATermAppl attrs = termAt(prod, PROD_ATTRS);
ATermList contents = termAt(appl, APPL_CONTENTS);
IToken prevToken = tokenizer.currentToken();
int lastOffset = offset;
// Enter lexical context if this is a lex node
boolean lexicalStart = !inLexicalContext && AsfixAnalyzer.isLexicalNode(rhs);
if (lexicalStart) inLexicalContext = true;
if (!inLexicalContext && "sort".equals(rhs.getName()) && lhs.getLength() == 1 && termAt(contents, 0).getType() == ATerm.INT) {
return setAnnos(createIntTerminal(contents, rhs), appl.getAnnotations());
}
boolean isList = !inLexicalContext && AsfixAnalyzer.isList(rhs);
boolean isVar = !inLexicalContext && !isList && AsfixAnalyzer.isVariableNode(rhs);
if (isVar) inLexicalContext = true;
ArrayList<AstNode> children = null;
if (!inLexicalContext)
children = new ArrayList<AstNode>(max(EXPECTED_NODE_CHILDREN, contents.getChildCount()));
// Recurse
for (int i = 0; i < contents.getLength(); i++) {
ATerm child = contents.elementAt(i);
if (isInt(child)) {
consumeLexicalChar((ATermInt) child);
} else {
AstNode childNode = implodeAppl(child);
if (childNode != null) children.add(childNode);
}
}
if (lexicalStart || isVar) {
return setAnnos(createStringTerminal(lhs, rhs, attrs), appl.getAnnotations());
} else if (inLexicalContext) {
// Create separate tokens for >1 char layout lexicals (e.g., comments)
if (offset > lastOffset + 1 && AsfixAnalyzer.isLexLayout(rhs)) {
tokenizer.makeToken(lastOffset, TK_LAYOUT, false);
tokenizer.makeToken(offset, TK_LAYOUT, false);
} else {
String sort = reader.getSort(rhs);
if ("WATERTOKEN".equals(sort) || "WATERTOKENSEPARATOR".equals(sort)) {
tokenizer.makeToken(lastOffset, TK_LAYOUT, false);
tokenizer.makeToken(offset, TK_ERROR, false);
}
}
return null; // don't create tokens inside lexical context; just create one big token at the top
} else {
return setAnnos(createNodeOrInjection(lhs, rhs, attrs, prevToken, children, isList), appl.getAnnotations());
}
}
protected AmbAstNode implodeAmbAppl(ATermAppl node) {
final ATermListImpl ambs = termAt(node, 0);
final ArrayList<AstNode> results = new ArrayList<AstNode>();
final int oldOffset = offset;
final int oldBeginOffset = tokenizer.getStartOffset();
final boolean oldLexicalContext = inLexicalContext;
for (ATerm amb : ambs) {
// Restore lexical state for each branch
offset = oldOffset;
tokenizer.setStartOffset(oldBeginOffset);
inLexicalContext = oldLexicalContext;
AstNode result = implodeAppl(amb);
if (result == null)
return null;
results.add(result);
}
return new AmbAstNode(results);
}
private AstNode setAnnos(AstNode node, ATermList annos) {
if (node != null && annos != null && !annos.isEmpty()) {
IStrategoTerm termAnnos = Environment.getATermConverter().convert(annos);
node.setAnnotations((IStrategoList) termAnnos);
}
return node;
}
private AstNode createStringTerminal(ATermList lhs, ATermAppl rhs, ATermAppl attrs) {
inLexicalContext = false;
String sort = reader.getSort(rhs);
IToken token = tokenizer.makeToken(offset, tokenManager.getTokenKind(lhs, rhs), sort != null);
if (sort == null) return null;
// Debug.log("Creating node ", sort, " from ", SGLRTokenizer.dumpToString(token));
AstNode result = factory.createStringTerminal(getPaddedLexicalValue(attrs, token), sort, token);
String constructor = reader.getMetaVarConstructor(rhs);
if (constructor != null) {
ArrayList<AstNode> children = new ArrayList<AstNode>(1);
children.add(result);
result = factory.createNonTerminal(sort, constructor, token, token, children);
}
return result;
}
private IntAstNode createIntTerminal(ATermList contents, ATermAppl rhs) {
IToken token = tokenizer.makeToken(offset, tokenManager.getTokenKind(contents, rhs), true);
String sort = reader.getSort(rhs);
int value = intAt(contents, 0);
return factory.createIntTerminal(sort, token, value);
}
private AstNode createNodeOrInjection(ATermList lhs, ATermAppl rhs, ATermAppl attrs,
IToken prevToken, ArrayList<AstNode> children, boolean isList) {
String constructor = reader.getConsAttribute(attrs);
String sort = reader.getSort(rhs);
if(constructor == null) {
if (isList) {
return createNode(attrs, sort, null, prevToken, children, true);
}
ATerm ast = reader.getAstAttribute(attrs);
if (ast != null) {
return createAstNonTerminal(rhs, prevToken, children, ast);
} else if (children.size() == 0) {
return createNode(attrs, sort, "None", prevToken, children, false);
} else if ("opt".equals(applAt(rhs, 0).getName())) {
assert children.size() == 1;
AstNode child = children.get(0);
return new AstNode(sort, child.getLeftIToken(), child.getRightIToken(), "Some", children);
} else {
// Injection
assert children.size() == 1;
return children.get(0);
}
} else {
tokenizer.makeToken(offset, tokenManager.getTokenKind(lhs, rhs));
return createNode(attrs, sort, constructor, prevToken, children, isList);
}
}
/** Implode a context-free node. */
private AstNode createNode(ATermAppl attrs, String sort, String constructor, IToken prevToken,
ArrayList<AstNode> children, boolean isList) {
IToken left = getStartToken(prevToken);
IToken right = getEndToken(left, tokenizer.currentToken());
/*
if (Debug.ENABLED) {
String name = isList ? "list" : sort;
Debug.log("Creating node ", name, ":", constructor, AstNode.getSorts(children), " from ", SGLRTokenizer.dumpToString(left, right));
}
*/
if (isList) {
return factory.createList(sort, left, right, children);
} else if (constructor == null && children.size() == 1 && children.get(0).getSort() == AstNode.STRING_SORT) {
// Child node was a <string> node (rare case); unpack it and create a new terminal
assert left == right && children.get(0).getChildren().size() == 0;
return factory.createStringTerminal(getPaddedLexicalValue(attrs, left), sort, left);
} else {
return factory.createNonTerminal(sort, constructor, left, right, children);
}
}
/**
* Gets the padded lexical value for {indentpadding} lexicals, or returns null.
*/
private String getPaddedLexicalValue(ATermAppl attrs, IToken startToken) {
if (reader.isIndentPaddingLexical(attrs)) {
char[] inputChars = tokenizer.getLexStream().getInputChars();
int lineStart = startToken.getStartOffset() - 1;
if (lineStart < 0) return null;
while (lineStart >= 0) {
char c = inputChars[lineStart--];
if (c == '\n' || c == '\r') {
lineStart++;
break;
}
}
StringBuilder result = new StringBuilder();
result.append(inputChars, lineStart, startToken.getStartOffset() - lineStart - 1);
for (int i = 0; i < result.length(); i++) {
char c = result.charAt(i);
if (c != ' ' && c != '\t') result.setCharAt(i, ' ');
}
result.append(startToken.toString());
return result.toString();
} else {
return null; // lazily load token string value
}
}
/** Implode a context-free node with an {ast} annotation. */
private AstNode createAstNonTerminal(ATermAppl rhs, IToken prevToken, ArrayList<AstNode> children, ATerm ast) {
IToken left = getStartToken(prevToken);
IToken right = getEndToken(left, tokenizer.currentToken());
AstAnnoImploder imploder = new AstAnnoImploder(factory, children, left, right);
return imploder.implode(ast, reader.getSort(rhs));
}
/**
* Resolve or ignore any ambiguities in the parse tree.
*/
protected ATermAppl resolveAmbiguities(final ATerm node) {
if (!"amb".equals(((ATermAppl) node).getName()))
return (ATermAppl) node;
final ATermListImpl ambs = termAt(node, 0);
ATermAppl lastNonAvoid = null;
ATermAppl firstOption = null;
boolean multipleNonAvoids = false;
alts:
for (int i = 0; i < ambs.getLength(); i++) {
ATermAppl prod = resolveAmbiguities(termAt(ambs, i));
if (firstOption == null) firstOption = prod;
ATermAppl appl = termAt(prod, APPL_PROD);
ATermAppl attrs = termAt(appl, PROD_ATTRS);
if ("attrs".equals(attrs.getName())) {
ATermList attrList = termAt(attrs, 0);
for (int j = 0; j < attrList.getLength(); j++) {
ATerm attr = termAt(attrList, j);
if (isAppl(attr) && "prefer".equals(asAppl(attr).getName())) {
return prod;
} else if (isAppl(attr) && "avoid".equals(asAppl(attr).getName())) {
continue alts;
}
}
if (lastNonAvoid == null) {
lastNonAvoid = prod;
} else {
multipleNonAvoids = true;
}
}
}
if (!multipleNonAvoids) {
return lastNonAvoid != null ? lastNonAvoid : firstOption;
} else {
if (Debug.ENABLED && !inLexicalContext) reportUnresolvedAmb(ambs);
return firstOption;
}
}
private static void reportUnresolvedAmb(ATermList ambs) {
Debug.log("Ambiguity found during implosion: ");
for (ATerm amb : ambs) {
String ambString = amb.toString();
if (ambString.length() > 1000) ambString = ambString.substring(0, 1000) + "...";
Debug.log(" amb: ", ambString);
}
}
/** Get the token after the previous node's ending token, or null if N/A. */
private IToken getStartToken(IToken prevToken) {
PrsStream parseStream = tokenizer.getParseStream();
if (prevToken == null) {
return parseStream.getSize() == 0 ? null
: parseStream.getTokenAt(0);
} else {
int index = prevToken.getTokenIndex();
if (parseStream.getSize() - index <= 1) {
// Create new empty token
// HACK: Assume TK_LAYOUT kind for empty tokens in AST nodes
return tokenizer.makeToken(offset, TK_LAYOUT, true);
} else {
return parseStream.getTokenAt(index + 1);
}
}
}
/** Get the last no-layout token for an AST node. */
private IToken getEndToken(IToken startToken, IToken lastToken) {
PrsStream parseStream = tokenizer.getParseStream();
int begin = startToken.getTokenIndex();
for (int i = lastToken.getTokenIndex(); i > begin; i--) {
lastToken = parseStream.getTokenAt(i);
if (lastToken.getKind() != TK_LAYOUT.ordinal()
|| lastToken.getStartOffset() == lastToken.getEndOffset()-1)
break;
}
return lastToken;
}
/** Consume a character of a lexical terminal. */
protected final void consumeLexicalChar(ATermInt character) {
char[] inputChars = tokenizer.getLexStream().getInputChars();
if (offset >= inputChars.length) {
if (nonMatchingOffset != NONE) {
Environment.logException(new ImploderException("Character in parse tree after end of input stream: "
+ (char) character.getInt()
+ " - may be caused by unexcepted character in parse tree at position "
+ nonMatchingChar + ": " + nonMatchingChar + " instead of "
+ nonMatchingCharExpected));
}
// UNDONE: Strict lexical stream checking
// throw new ImploderException("Character in parse tree after end of input stream: " + (char) character.getInt());
// a forced reduction may have added some extra characters to the tree;
inputChars[inputChars.length - 1] = ParseErrorHandler.UNEXPECTED_EOF_CHAR;
return;
}
char parsedChar = (char) character.getInt();
char inputChar = inputChars[offset];
if (parsedChar != inputChar) {
if (RecoveryConnector.isLayoutCharacter(parsedChar)) {
// Remember that the parser skipped the current character
// for later error reporting. (Cannot modify the immutable
// parse tree here; changing the original stream instead.)
inputChars[offset] = ParseErrorHandler.SKIPPED_CHAR;
createSkippedToken(inputChars, inputChar);
offset++;
} else {
// UNDONE: Strict lexical stream checking
// throw new IllegalStateException("Character from asfix stream (" + parsedChar
// + ") must be in lex stream (" + inputChar + ")");
// instead, we allow the non-matching character for now, and hope
// we can pick up the right track later
// TODO: better way to report skipped fragments in the parser
// this isn't 100% reliable
if (nonMatchingOffset == NONE) {
nonMatchingOffset = offset;
nonMatchingChar = parsedChar;
nonMatchingCharExpected = inputChar;
}
inputChars[offset] = ParseErrorHandler.SKIPPED_CHAR;
}
} else {
offset++;
}
prevChar = inputChar;
}
/**
* Creates an artificial token at keyword boundaries
* inside skipped regions of code.
* Required for keyword highlighting with {@link KeywordRecognizer}.
*/
private void createSkippedToken(char[] inputChars, char inputChar) {
boolean isInputKeywordChar = isKeywordChar(inputChar);
if (offset > 0) {
if ((isInputKeywordChar && !isKeywordChar(prevChar))
|| (!isInputKeywordChar && isKeywordChar(prevChar))) {
tokenizer.makeToken(offset - 1, TK_ERROR, false);
}
}
if (offset + 1 < inputChars.length) {
char nextChar = inputChars[offset + 1];
if ((isInputKeywordChar && !isKeywordChar(nextChar))
|| (!isInputKeywordChar && isKeywordChar(nextChar))) {
tokenizer.makeToken(offset + 1, TK_ERROR, false);
}
}
}
}
| minor refactoring
svn path=/spoofax-imp/trunk/org.strategoxt.imp.runtime/; revision=20961
| org.strategoxt.imp.runtime/src/org/strategoxt/imp/runtime/parser/ast/AsfixImploder.java | minor refactoring | <ide><path>rg.strategoxt.imp.runtime/src/org/strategoxt/imp/runtime/parser/ast/AsfixImploder.java
<ide> if (lexicalStart || isVar) {
<ide> return setAnnos(createStringTerminal(lhs, rhs, attrs), appl.getAnnotations());
<ide> } else if (inLexicalContext) {
<del> // Create separate tokens for >1 char layout lexicals (e.g., comments)
<del> if (offset > lastOffset + 1 && AsfixAnalyzer.isLexLayout(rhs)) {
<del> tokenizer.makeToken(lastOffset, TK_LAYOUT, false);
<del> tokenizer.makeToken(offset, TK_LAYOUT, false);
<del> } else {
<del> String sort = reader.getSort(rhs);
<del> if ("WATERTOKEN".equals(sort) || "WATERTOKENSEPARATOR".equals(sort)) {
<del> tokenizer.makeToken(lastOffset, TK_LAYOUT, false);
<del> tokenizer.makeToken(offset, TK_ERROR, false);
<del> }
<del> }
<add> createLayoutToken(rhs, lastOffset);
<ide> return null; // don't create tokens inside lexical context; just create one big token at the top
<ide> } else {
<ide> return setAnnos(createNodeOrInjection(lhs, rhs, attrs, prevToken, children, isList), appl.getAnnotations());
<ide> }
<ide> }
<ide> }
<add>
<add> /**
<add> * Creates an artificial token for every water-based recovery
<add> * and for comments within layout.
<add> */
<add> private void createLayoutToken(ATermAppl rhs, int lastOffset) {
<add> // Create separate tokens for >1 char layout lexicals (e.g., comments)
<add> if (offset > lastOffset + 1 && AsfixAnalyzer.isLexLayout(rhs)) {
<add> tokenizer.makeToken(lastOffset, TK_LAYOUT, false);
<add> tokenizer.makeToken(offset, TK_LAYOUT, false);
<add> } else {
<add> String sort = reader.getSort(rhs);
<add> if ("WATERTOKEN".equals(sort) || "WATERTOKENSEPARATOR".equals(sort)) {
<add> tokenizer.makeToken(lastOffset, TK_LAYOUT, false);
<add> tokenizer.makeToken(offset, TK_ERROR, false);
<add> }
<add> }
<add> }
<ide> } |
|
Java | mit | 50c47f715ac3799f54a38a5306bd3e98a1c51a69 | 0 | breadwallet/breadwallet-android,breadwallet/breadwallet-android,breadwallet/breadwallet-android,breadwallet/breadwallet-android | package com.breadwallet.tools.adapter;
import android.app.Activity;
import android.content.Context;
import android.graphics.Typeface;
import android.support.annotation.NonNull;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.util.TypedValue;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import com.breadwallet.R;
import com.breadwallet.presenter.customviews.BaseTextView;
import com.breadwallet.presenter.entities.TokenItem;
import com.breadwallet.tools.util.BRConstants;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
public class AddTokenListAdapter extends RecyclerView.Adapter<AddTokenListAdapter.TokenItemViewHolder> {
private Context mContext;
private ArrayList<TokenItem> mTokens;
private ArrayList<TokenItem> mBackupTokens;
private static final String TAG = AddTokenListAdapter.class.getSimpleName();
private OnTokenAddOrRemovedListener mListener;
public AddTokenListAdapter(Context context, ArrayList<TokenItem> tokens, OnTokenAddOrRemovedListener listener) {
this.mContext = context;
this.mTokens = tokens;
this.mListener = listener;
this.mBackupTokens = mTokens;
Collections.sort(mTokens, new Comparator<TokenItem>() {
@Override
public int compare(TokenItem one, TokenItem two) {
return one.symbol.compareToIgnoreCase(two.symbol);
}
});
}
public interface OnTokenAddOrRemovedListener {
void onTokenAdded(TokenItem token);
void onTokenRemoved(TokenItem token);
}
@Override
public void onBindViewHolder(final @NonNull AddTokenListAdapter.TokenItemViewHolder holder, final int position) {
TokenItem item = mTokens.get(position);
String currencyCode = item.symbol.toLowerCase();
if (currencyCode.equals("1st")) {
currencyCode = "first";
}
String iconResourceName = currencyCode;
int iconResourceId = mContext.getResources().getIdentifier(currencyCode, BRConstants.DRAWABLE, mContext.getPackageName());
holder.name.setText(mTokens.get(position).symbol);
holder.symbol.setText(mTokens.get(position).name);
try {
holder.logo.setBackground(mContext.getDrawable(iconResourceId));
} catch (Exception e) {
e.printStackTrace();
Log.d(TAG, "Error finding icon for -> " + iconResourceName);
}
TypedValue addWalletTypedValue = new TypedValue();
TypedValue removeWalletTypedValue = new TypedValue();
mContext.getTheme().resolveAttribute(R.attr.add_wallet_button_background, addWalletTypedValue, true);
mContext.getTheme().resolveAttribute(R.attr.remove_wallet_button_background, removeWalletTypedValue, true);
holder.addRemoveButton.setText(mContext.getString(item.isAdded ? R.string.TokenList_remove : R.string.TokenList_add));
holder.addRemoveButton.setBackground(mContext.getDrawable(item.isAdded ? removeWalletTypedValue.resourceId : addWalletTypedValue.resourceId));
holder.addRemoveButton.setTextColor(mContext.getColor(item.isAdded ? R.color.button_cancel_add_wallet_text : R.color.button_add_wallet_text));
holder.addRemoveButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// Set button to "Remove"
if (!mTokens.get(position).isAdded) {
mTokens.get(position).isAdded = true;
mListener.onTokenAdded(mTokens.get(position));
} else {
// Set button back to "Add"
mTokens.get(position).isAdded = false;
mListener.onTokenRemoved(mTokens.get(position));
}
}
});
}
@Override
public int getItemCount() {
return mTokens.size();
}
@NonNull
@Override
public AddTokenListAdapter.TokenItemViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
LayoutInflater inflater = ((Activity) mContext).getLayoutInflater();
View convertView = inflater.inflate(R.layout.token_list_item, parent, false);
TokenItemViewHolder holder = new TokenItemViewHolder(convertView);
holder.setIsRecyclable(false);
return holder;
}
public class TokenItemViewHolder extends RecyclerView.ViewHolder {
private ImageView logo;
private BaseTextView symbol;
private BaseTextView name;
private Button addRemoveButton;
public TokenItemViewHolder(View view) {
super(view);
logo = view.findViewById(R.id.token_icon);
symbol = view.findViewById(R.id.token_ticker);
name = view.findViewById(R.id.token_name);
addRemoveButton = view.findViewById(R.id.add_remove_button);
Typeface typeface = Typeface.createFromAsset(mContext.getAssets(), "fonts/CircularPro-Book.otf");
addRemoveButton.setTypeface(typeface);
}
}
public void resetFilter() {
mTokens = mBackupTokens;
notifyDataSetChanged();
}
public void filter(String query) {
resetFilter();
ArrayList<TokenItem> filteredList = new ArrayList<>();
query = query.toLowerCase();
for (TokenItem item : mTokens) {
if (item.name.toLowerCase().contains(query) || item.symbol.toLowerCase().contains(query)) {
filteredList.add(item);
}
}
mTokens = filteredList;
notifyDataSetChanged();
}
}
| app/src/main/java/com/breadwallet/tools/adapter/AddTokenListAdapter.java | package com.breadwallet.tools.adapter;
import android.app.Activity;
import android.content.Context;
import android.graphics.Typeface;
import android.support.annotation.NonNull;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.util.TypedValue;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import com.breadwallet.R;
import com.breadwallet.presenter.customviews.BaseTextView;
import com.breadwallet.presenter.entities.TokenItem;
import com.breadwallet.tools.util.BRConstants;
import java.util.ArrayList;
public class AddTokenListAdapter extends RecyclerView.Adapter<AddTokenListAdapter.TokenItemViewHolder> {
private Context mContext;
private ArrayList<TokenItem> mTokens;
private ArrayList<TokenItem> mBackupTokens;
private static final String TAG = AddTokenListAdapter.class.getSimpleName();
private OnTokenAddOrRemovedListener mListener;
public AddTokenListAdapter(Context context, ArrayList<TokenItem> tokens, OnTokenAddOrRemovedListener listener) {
this.mContext = context;
this.mTokens = tokens;
this.mListener = listener;
this.mBackupTokens = mTokens;
}
public interface OnTokenAddOrRemovedListener {
void onTokenAdded(TokenItem token);
void onTokenRemoved(TokenItem token);
}
@Override
public void onBindViewHolder(final @NonNull AddTokenListAdapter.TokenItemViewHolder holder, final int position) {
TokenItem item = mTokens.get(position);
String currencyCode = item.symbol.toLowerCase();
if (currencyCode.equals("1st")) {
currencyCode = "first";
}
String iconResourceName = currencyCode;
int iconResourceId = mContext.getResources().getIdentifier(currencyCode, BRConstants.DRAWABLE, mContext.getPackageName());
holder.name.setText(mTokens.get(position).name);
holder.symbol.setText(mTokens.get(position).symbol);
try {
holder.logo.setBackground(mContext.getDrawable(iconResourceId));
} catch (Exception e) {
e.printStackTrace();
Log.d(TAG, "Error finding icon for -> " + iconResourceName);
}
TypedValue addWalletTypedValue = new TypedValue();
TypedValue removeWalletTypedValue = new TypedValue();
mContext.getTheme().resolveAttribute(R.attr.add_wallet_button_background, addWalletTypedValue, true);
mContext.getTheme().resolveAttribute(R.attr.remove_wallet_button_background, removeWalletTypedValue, true);
holder.addRemoveButton.setText(mContext.getString(item.isAdded ? R.string.TokenList_remove : R.string.TokenList_add));
holder.addRemoveButton.setBackground(mContext.getDrawable(item.isAdded ? removeWalletTypedValue.resourceId : addWalletTypedValue.resourceId));
holder.addRemoveButton.setTextColor(mContext.getColor(item.isAdded ? R.color.button_cancel_add_wallet_text : R.color.button_add_wallet_text));
holder.addRemoveButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// Set button to "Remove"
if (!mTokens.get(position).isAdded) {
mTokens.get(position).isAdded = true;
mListener.onTokenAdded(mTokens.get(position));
} else {
// Set button back to "Add"
mTokens.get(position).isAdded = false;
mListener.onTokenRemoved(mTokens.get(position));
}
}
});
}
@Override
public int getItemCount() {
return mTokens.size();
}
@NonNull
@Override
public AddTokenListAdapter.TokenItemViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
LayoutInflater inflater = ((Activity) mContext).getLayoutInflater();
View convertView = inflater.inflate(R.layout.token_list_item, parent, false);
TokenItemViewHolder holder = new TokenItemViewHolder(convertView);
holder.setIsRecyclable(false);
return holder;
}
public class TokenItemViewHolder extends RecyclerView.ViewHolder {
private ImageView logo;
private BaseTextView symbol;
private BaseTextView name;
private Button addRemoveButton;
public TokenItemViewHolder(View view) {
super(view);
logo = view.findViewById(R.id.token_icon);
symbol = view.findViewById(R.id.token_ticker);
name = view.findViewById(R.id.token_name);
addRemoveButton = view.findViewById(R.id.add_remove_button);
Typeface typeface = Typeface.createFromAsset(mContext.getAssets(), "fonts/CircularPro-Book.otf");
addRemoveButton.setTypeface(typeface);
}
}
public void resetFilter() {
mTokens = mBackupTokens;
notifyDataSetChanged();
}
public void filter(String query) {
resetFilter();
ArrayList<TokenItem> filteredList = new ArrayList<>();
query = query.toLowerCase();
for (TokenItem item : mTokens) {
if (item.name.toLowerCase().contains(query) || item.symbol.toLowerCase().contains(query)) {
filteredList.add(item);
}
}
mTokens = filteredList;
notifyDataSetChanged();
}
}
| Fix DROID-698, sort add token list by symbol. Fix display to match mocks
| app/src/main/java/com/breadwallet/tools/adapter/AddTokenListAdapter.java | Fix DROID-698, sort add token list by symbol. Fix display to match mocks | <ide><path>pp/src/main/java/com/breadwallet/tools/adapter/AddTokenListAdapter.java
<ide> import com.breadwallet.tools.util.BRConstants;
<ide>
<ide> import java.util.ArrayList;
<add>import java.util.Collections;
<add>import java.util.Comparator;
<ide>
<ide> public class AddTokenListAdapter extends RecyclerView.Adapter<AddTokenListAdapter.TokenItemViewHolder> {
<ide>
<ide> this.mTokens = tokens;
<ide> this.mListener = listener;
<ide> this.mBackupTokens = mTokens;
<add>
<add> Collections.sort(mTokens, new Comparator<TokenItem>() {
<add> @Override
<add> public int compare(TokenItem one, TokenItem two) {
<add> return one.symbol.compareToIgnoreCase(two.symbol);
<add> }
<add> });
<add>
<ide> }
<ide>
<ide> public interface OnTokenAddOrRemovedListener {
<ide> String iconResourceName = currencyCode;
<ide> int iconResourceId = mContext.getResources().getIdentifier(currencyCode, BRConstants.DRAWABLE, mContext.getPackageName());
<ide>
<del> holder.name.setText(mTokens.get(position).name);
<del> holder.symbol.setText(mTokens.get(position).symbol);
<add> holder.name.setText(mTokens.get(position).symbol);
<add> holder.symbol.setText(mTokens.get(position).name);
<ide> try {
<ide> holder.logo.setBackground(mContext.getDrawable(iconResourceId));
<ide> } catch (Exception e) { |
|
Java | apache-2.0 | a2f20ffce65f9a46036e6b34ea9a788aedc2ff54 | 0 | rdoeffinger/DictionaryPC,rdoeffinger/DictionaryPC,rdoeffinger/DictionaryPC |
package com.hughes.android.dictionary.parser.wiktionary;
import com.hughes.android.dictionary.engine.EntryTypeName;
import com.hughes.android.dictionary.engine.HtmlEntry;
import com.hughes.android.dictionary.engine.IndexBuilder;
import com.hughes.android.dictionary.engine.IndexBuilder.TokenData;
import com.hughes.android.dictionary.engine.IndexedEntry;
import com.hughes.android.dictionary.parser.WikiTokenizer;
import com.hughes.util.StringUtil;
import org.apache.commons.lang3.StringEscapeUtils;
import java.net.URI;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
public class WholeSectionToHtmlParser extends AbstractWiktionaryParser {
public static final String NAME = "WholeSectionToHtmlParser";
interface LangConfig {
boolean skipSection(final String name);
EntryTypeName sectionNameToEntryType(String sectionName);
boolean skipWikiLink(final WikiTokenizer wikiTokenizer);
String adjustWikiLink(String wikiLinkDest, final String wikiLinkText);
void addFunctionCallbacks(
Map<String, FunctionCallback<WholeSectionToHtmlParser>> functionCallbacks);
}
static final Map<String,LangConfig> isoToLangConfig = new LinkedHashMap<String,LangConfig>();
static {
final Pattern enSkipSections = Pattern.compile(".*(Translations|Anagrams|References).*");
isoToLangConfig.put("EN", new LangConfig() {
@Override
public boolean skipSection(String headingText) {
return enSkipSections.matcher(headingText).matches();
}
@Override
public EntryTypeName sectionNameToEntryType(String sectionName) {
if (sectionName.equalsIgnoreCase("Synonyms")) {
return EntryTypeName.SYNONYM_MULTI;
}
if (sectionName.equalsIgnoreCase("Antonyms")) {
return EntryTypeName.ANTONYM_MULTI;
}
if (EnParser.partOfSpeechHeader.matcher(sectionName).matches()) {
// We need to put it in the other index, too (probably)
return null;
}
if (sectionName.equalsIgnoreCase("Derived Terms")) {
return null;
}
return null;
}
@Override
public boolean skipWikiLink(WikiTokenizer wikiTokenizer) {
final String wikiText = wikiTokenizer.wikiLinkText();
if (wikiText.startsWith("Category:")) {
return true;
}
return false;
}
@Override
public String adjustWikiLink(String wikiLinkDest, String wikiLinkText) {
if (wikiLinkDest.startsWith("w:") || wikiLinkDest.startsWith("Image:")) {
return null;
}
final int hashPos = wikiLinkDest.indexOf("#");
if (hashPos != -1) {
wikiLinkDest = wikiLinkDest.substring(0, hashPos);
if (wikiLinkDest.isEmpty()) {
wikiLinkDest = wikiLinkText;
}
}
return wikiLinkDest;
}
@Override
public void addFunctionCallbacks(
Map<String, FunctionCallback<WholeSectionToHtmlParser>> functionCallbacks) {
EnFunctionCallbacks.addGenericCallbacks(functionCallbacks);
}
});
final Pattern esSkipSections = Pattern.compile(".*(Traducciones|Locuciones).*");
isoToLangConfig.put("ES", new LangConfig() {
@Override
public boolean skipSection(String headingText) {
return esSkipSections.matcher(headingText).matches();
}
@Override
public EntryTypeName sectionNameToEntryType(String sectionName) {
if (sectionName.equalsIgnoreCase("sinónimo") || sectionName.equalsIgnoreCase("sinónimos")) {
return EntryTypeName.SYNONYM_MULTI;
}
if (sectionName.equalsIgnoreCase("antónimo") || sectionName.equalsIgnoreCase("antónimos")) {
return EntryTypeName.ANTONYM_MULTI;
}
return null;
}
@Override
public boolean skipWikiLink(WikiTokenizer wikiTokenizer) {
final String wikiText = wikiTokenizer.wikiLinkText();
if (wikiText.startsWith("Categoría:")) {
return true;
}
return false;
}
@Override
public String adjustWikiLink(String wikiLinkDest, String wikiLinkText) {
if (wikiLinkDest.startsWith("w:") || wikiLinkDest.startsWith("Image:")) {
return null;
}
final int hashPos = wikiLinkDest.indexOf("#");
if (hashPos != -1) {
wikiLinkDest = wikiLinkDest.substring(0, hashPos);
if (wikiLinkDest.isEmpty()) {
wikiLinkDest = wikiLinkText;
}
}
return wikiLinkDest;
}
@Override
public void addFunctionCallbacks(
Map<String, FunctionCallback<WholeSectionToHtmlParser>> functionCallbacks) {
// TODO: need Spanish variant
}
});
final Pattern ptSkipSections = Pattern.compile(".*Tradução.*");
isoToLangConfig.put("PT", new LangConfig() {
@Override
public boolean skipSection(String headingText) {
return esSkipSections.matcher(headingText).matches();
}
@Override
public EntryTypeName sectionNameToEntryType(String sectionName) {
if (sectionName.equalsIgnoreCase("Sinônimo") || sectionName.equalsIgnoreCase("Sinônimos")) {
return EntryTypeName.SYNONYM_MULTI;
}
if (sectionName.equalsIgnoreCase("Antônimo") || sectionName.equalsIgnoreCase("Antônimos")) {
return EntryTypeName.ANTONYM_MULTI;
}
return null;
}
@Override
public boolean skipWikiLink(WikiTokenizer wikiTokenizer) {
final String wikiText = wikiTokenizer.wikiLinkText();
if (wikiText.startsWith("Categoría:")) {
return true;
}
return false;
}
@Override
public String adjustWikiLink(String wikiLinkDest, String wikiLinkText) {
if (wikiLinkDest.startsWith("w:") || wikiLinkDest.startsWith("Image:")) {
return null;
}
final int hashPos = wikiLinkDest.indexOf("#");
if (hashPos != -1) {
wikiLinkDest = wikiLinkDest.substring(0, hashPos);
if (wikiLinkDest.isEmpty()) {
wikiLinkDest = wikiLinkText;
}
}
return wikiLinkDest;
}
@Override
public void addFunctionCallbacks(
Map<String, FunctionCallback<WholeSectionToHtmlParser>> functionCallbacks) {
// TODO: need Portuguese variant
}
});
final Pattern deSkipSections = Pattern.compile(".*(Übersetzungen|Referenzen|Quellen).*");
isoToLangConfig.put("DE", new LangConfig() {
@Override
public boolean skipSection(String headingText) {
return deSkipSections.matcher(headingText).matches();
}
@Override
public EntryTypeName sectionNameToEntryType(String sectionName) {
if (sectionName.equalsIgnoreCase("Synonyme")) {
return EntryTypeName.SYNONYM_MULTI;
}
if (sectionName.equalsIgnoreCase("Gegenwörter")) {
return EntryTypeName.ANTONYM_MULTI;
}
return null;
}
@Override
public boolean skipWikiLink(WikiTokenizer wikiTokenizer) {
final String wikiText = wikiTokenizer.wikiLinkText();
if (wikiText.startsWith("Kategorie:")) {
return true;
}
return false;
}
@Override
public String adjustWikiLink(String wikiLinkDest, String wikiLinkText) {
if (wikiLinkDest.startsWith("w:") || wikiLinkDest.startsWith("Image:")) {
return null;
}
final int hashPos = wikiLinkDest.indexOf("#");
if (hashPos != -1) {
wikiLinkDest = wikiLinkDest.substring(0, hashPos);
if (wikiLinkDest.isEmpty()) {
wikiLinkDest = wikiLinkText;
}
}
return wikiLinkDest;
}
@Override
public void addFunctionCallbacks(
Map<String, FunctionCallback<WholeSectionToHtmlParser>> functionCallbacks) {
DeFunctionCallbacks.addGenericCallbacks(functionCallbacks);
}
});
final Pattern itSkipSections = Pattern.compile(".*(Traduzione|Note / Riferimenti).*");
isoToLangConfig.put("IT", new LangConfig() {
@Override
public boolean skipSection(String headingText) {
return itSkipSections.matcher(headingText).matches();
}
@Override
public EntryTypeName sectionNameToEntryType(String sectionName) {
if (sectionName.equalsIgnoreCase("Sinonimi")) {
return EntryTypeName.SYNONYM_MULTI;
}
if (sectionName.equalsIgnoreCase("Antonimi/Contrari")) {
return EntryTypeName.ANTONYM_MULTI;
}
return null;
}
@Override
public boolean skipWikiLink(WikiTokenizer wikiTokenizer) {
final String wikiText = wikiTokenizer.wikiLinkText();
if (wikiText.startsWith("Categoria:")) {
return true;
}
return false;
}
@Override
public String adjustWikiLink(String wikiLinkDest, String wikiLinkText) {
if (wikiLinkDest.startsWith("w:") || wikiLinkDest.startsWith("Image:")) {
return null;
}
final int hashPos = wikiLinkDest.indexOf("#");
if (hashPos != -1) {
wikiLinkDest = wikiLinkDest.substring(0, hashPos);
if (wikiLinkDest.isEmpty()) {
wikiLinkDest = wikiLinkText;
}
}
return wikiLinkDest;
}
@Override
public void addFunctionCallbacks(
Map<String, FunctionCallback<WholeSectionToHtmlParser>> functionCallbacks) {
ItFunctionCallbacks.addGenericCallbacks(functionCallbacks);
}
});
final Pattern frSkipSections = Pattern.compile(".*([Tt]raductions|[Aa]nagrammes).*");
isoToLangConfig.put("FR", new LangConfig() {
@Override
public boolean skipSection(String headingText) {
return frSkipSections.matcher(headingText).matches();
}
@Override
public EntryTypeName sectionNameToEntryType(String sectionName) {
if (sectionName.equalsIgnoreCase("Synonymes")) {
return EntryTypeName.SYNONYM_MULTI;
}
if (sectionName.equalsIgnoreCase("Antonymes")) {
return EntryTypeName.ANTONYM_MULTI;
}
return null;
}
@Override
public boolean skipWikiLink(WikiTokenizer wikiTokenizer) {
final String wikiText = wikiTokenizer.wikiLinkText();
if (wikiText.startsWith("Catégorie:")) {
return true;
}
return false;
}
@Override
public String adjustWikiLink(String wikiLinkDest, String wikiLinkText) {
if (wikiLinkDest.startsWith("w:") || wikiLinkDest.startsWith("Image:")) {
return null;
}
final int hashPos = wikiLinkDest.indexOf("#");
if (hashPos != -1) {
wikiLinkDest = wikiLinkDest.substring(0, hashPos);
if (wikiLinkDest.isEmpty()) {
wikiLinkDest = wikiLinkText;
}
}
return wikiLinkDest;
}
@Override
public void addFunctionCallbacks(
Map<String, FunctionCallback<WholeSectionToHtmlParser>> functionCallbacks) {
FrFunctionCallbacks.addGenericCallbacks(functionCallbacks);
}
});
}
final IndexBuilder titleIndexBuilder;
final IndexBuilder defIndexBuilder;
final String skipLangIso;
final LangConfig langConfig;
final String webUrlTemplate;
public WholeSectionToHtmlParser(final IndexBuilder titleIndexBuilder, final IndexBuilder defIndexBuilder, final String wiktionaryIso, final String skipLangIso,
final String webUrlTemplate) {
this.titleIndexBuilder = titleIndexBuilder;
this.defIndexBuilder = defIndexBuilder;
assert isoToLangConfig.containsKey(wiktionaryIso): wiktionaryIso;
this.langConfig = isoToLangConfig.get(wiktionaryIso);
this.skipLangIso = skipLangIso;
this.webUrlTemplate = webUrlTemplate;
}
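    // Example construction (illustrative only; the argument values below are hypothetical):
    //   new WholeSectionToHtmlParser(titleIndexBuilder, defIndexBuilder, "EN", "DE",
    //           "http://en.wiktionary.org/wiki/%s");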
IndexedEntry indexedEntry = null;
@Override
public void parseSection(String heading, String text) {
assert entrySource != null;
final HtmlEntry htmlEntry = new HtmlEntry(entrySource, title);
indexedEntry = new IndexedEntry(htmlEntry);
final AppendAndIndexWikiCallback<WholeSectionToHtmlParser> callback = new AppendCallback(
this);
langConfig.addFunctionCallbacks(callback.functionCallbacks);
callback.builder = new StringBuilder();
callback.indexedEntry = indexedEntry;
callback.dispatch(text, null);
if (webUrlTemplate != null) {
final String webUrl = String.format(webUrlTemplate, title);
// URI.create can raise an exception e.g. if webUrl contains %, just ignore those cases.
try {
callback.builder.append(String.format("<p> <a href=\"%s\">%s</a>", URI.create(webUrl).toASCIIString(), escapeHtmlLiteral(webUrl)));
} catch (Exception e) {
}
}
htmlEntry.html = callback.builder.toString();
indexedEntry.isValid = true;
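        // Register the rendered entry under the page title: mark the token as having a main entry and attach the HTML.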
final TokenData tokenData = titleIndexBuilder.getOrCreateTokenData(title);
tokenData.hasMainEntry = true;
htmlEntry.addToDictionary(titleIndexBuilder.index.dict);
tokenData.htmlEntries.add(htmlEntry);
// titleIndexBuilder.addEntryWithString(indexedEntry, title,
// EntryTypeName.WIKTIONARY_TITLE_MULTI_DETAIL);
indexedEntry = null;
}
@Override
void removeUselessArgs(Map<String, String> namedArgs) {
}
@Override
public void addLinkToCurrentEntry(String token, final String lang, EntryTypeName entryTypeName) {
if (lang == null || lang.equals(skipLangIso)) {
titleIndexBuilder.addEntryWithString(indexedEntry, token, entryTypeName);
}
}
public static String escapeHtmlLiteral(final String plainText) {
final String htmlEscaped = StringEscapeUtils.escapeHtml3(plainText);
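        // If the escaped string still contains non-ASCII characters, fall back to StringUtil's pure-HTML escaping of the original text.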
if (StringUtil.isAscii(htmlEscaped)) {
return htmlEscaped;
} else {
return StringUtil.escapeUnicodeToPureHtml(plainText);
}
}
class AppendCallback extends AppendAndIndexWikiCallback<WholeSectionToHtmlParser> {
public AppendCallback(WholeSectionToHtmlParser parser) {
super(parser);
}
@Override
public void onPlainText(String plainText) {
super.onPlainText(escapeHtmlLiteral(plainText));
}
@Override
public void onWikiLink(WikiTokenizer wikiTokenizer) {
if (wikiTokenizer.wikiLinkText().endsWith(":" + title)) {
// Skips wikilinks like: [[en::dick]]
return;
}
if (langConfig.skipWikiLink(wikiTokenizer)) {
return;
}
String linkDest;
if (wikiTokenizer.wikiLinkDest() != null) {
linkDest = langConfig.adjustWikiLink(wikiTokenizer.wikiLinkDest(), wikiTokenizer.wikiLinkText());
} else {
linkDest = wikiTokenizer.wikiLinkText();
}
if (sectionEntryTypeName != null) {
// TODO: inside a definition, this could be the wrong language.
titleIndexBuilder.addEntryWithString(indexedEntry, wikiTokenizer.wikiLinkText(), sectionEntryTypeName);
}
if (!StringUtil.isNullOrEmpty(linkDest)) {
builder.append(String.format("<a href=\"%s\">", HtmlEntry.formatQuickdicUrl("", linkDest)));
super.onWikiLink(wikiTokenizer);
builder.append(String.format("</a>"));
} else {
super.onWikiLink(wikiTokenizer);
}
}
@Override
public void onFunction(WikiTokenizer wikiTokenizer, String name,
List<String> args, Map<String, String> namedArgs) {
if (skipLangIso.equalsIgnoreCase(namedArgs.get("lang"))) {
namedArgs.remove("lang");
}
super.onFunction(wikiTokenizer, name, args, namedArgs);
}
@Override
public void onHtml(WikiTokenizer wikiTokenizer) {
super.onHtml(wikiTokenizer);
}
@Override
public void onNewline(WikiTokenizer wikiTokenizer) {
}
EntryTypeName sectionEntryTypeName;
IndexBuilder currentIndexBuilder;
@Override
public void onHeading(WikiTokenizer wikiTokenizer) {
final String headingText = wikiTokenizer.headingWikiText();
sectionEntryTypeName = langConfig.sectionNameToEntryType(headingText);
final int depth = wikiTokenizer.headingDepth();
if (langConfig.skipSection(headingText)) {
//System.out.println("Skipping section:" + headingText);
while ((wikiTokenizer = wikiTokenizer.nextToken()) != null) {
if (wikiTokenizer.isHeading() && wikiTokenizer.headingDepth() <= depth) {
// System.out.println("Resume on: " + wikiTokenizer.token());
wikiTokenizer.returnToLineStart();
return;
} else {
// System.out.println("Skipped: " + wikiTokenizer.token());
}
}
return;
}
builder.append(String.format("\n<h%d>", depth));
dispatch(headingText, null);
builder.append(String.format("</h%d>\n", depth));
}
final List<Character> listPrefixStack = new ArrayList<Character>();
@Override
public void onListItem(WikiTokenizer wikiTokenizer) {
if (builder.length() != 0 && builder.charAt(builder.length() - 1) != '\n') {
builder.append("\n");
}
final String prefix = wikiTokenizer.listItemPrefix();
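            // Open list tags until the stack of open tags is as deep as this item's prefix.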
while (listPrefixStack.size() < prefix.length()) {
builder.append(String.format("<%s>",
WikiTokenizer.getListTag(prefix.charAt(listPrefixStack.size()))));
listPrefixStack.add(prefix.charAt(listPrefixStack.size()));
}
builder.append("<li>");
dispatch(wikiTokenizer.listItemWikiText(), null);
builder.append("</li>\n");
WikiTokenizer nextToken = wikiTokenizer.nextToken();
boolean returnToLineStart = false;
if (nextToken != null && nextToken.isNewline()) {
nextToken = nextToken.nextToken();
returnToLineStart = true;
}
final String nextListHeader;
if (nextToken == null || !nextToken.isListItem()) {
nextListHeader = "";
} else {
nextListHeader = nextToken.listItemPrefix();
}
if (returnToLineStart) {
wikiTokenizer.returnToLineStart();
}
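            // Close any list tags that the next item's prefix no longer needs.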
while (listPrefixStack.size() > nextListHeader.length()) {
final char prefixChar = listPrefixStack.remove(listPrefixStack.size() - 1);
builder.append(String.format("</%s>\n", WikiTokenizer.getListTag(prefixChar)));
}
}
boolean boldOn = false;
boolean italicOn = false;
@Override
public void onMarkup(WikiTokenizer wikiTokenizer) {
if ("'''".equals(wikiTokenizer.token())) {
if (!boldOn) {
builder.append("<b>");
} else {
builder.append("</b>");
}
boldOn = !boldOn;
} else if ("''".equals(wikiTokenizer.token())) {
if (!italicOn) {
builder.append("<em>");
} else {
builder.append("</em>");
}
italicOn = !italicOn;
} else {
assert false;
}
}
}
}
| src/com/hughes/android/dictionary/parser/wiktionary/WholeSectionToHtmlParser.java |
package com.hughes.android.dictionary.parser.wiktionary;
import com.hughes.android.dictionary.engine.EntryTypeName;
import com.hughes.android.dictionary.engine.HtmlEntry;
import com.hughes.android.dictionary.engine.IndexBuilder;
import com.hughes.android.dictionary.engine.IndexBuilder.TokenData;
import com.hughes.android.dictionary.engine.IndexedEntry;
import com.hughes.android.dictionary.parser.WikiTokenizer;
import com.hughes.util.StringUtil;
import org.apache.commons.lang3.StringEscapeUtils;
import java.net.URI;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
public class WholeSectionToHtmlParser extends AbstractWiktionaryParser {
public static final String NAME = "WholeSectionToHtmlParser";
interface LangConfig {
boolean skipSection(final String name);
EntryTypeName sectionNameToEntryType(String sectionName);
boolean skipWikiLink(final WikiTokenizer wikiTokenizer);
String adjustWikiLink(String wikiLinkDest, final String wikiLinkText);
void addFunctionCallbacks(
Map<String, FunctionCallback<WholeSectionToHtmlParser>> functionCallbacks);
}
static final Map<String,LangConfig> isoToLangConfig = new LinkedHashMap<String,LangConfig>();
static {
final Pattern enSkipSections = Pattern.compile(".*(Translations|Anagrams|References).*");
isoToLangConfig.put("EN", new LangConfig() {
@Override
public boolean skipSection(String headingText) {
return enSkipSections.matcher(headingText).matches();
}
@Override
public EntryTypeName sectionNameToEntryType(String sectionName) {
if (sectionName.equalsIgnoreCase("Synonyms")) {
return EntryTypeName.SYNONYM_MULTI;
}
if (sectionName.equalsIgnoreCase("Antonyms")) {
return EntryTypeName.ANTONYM_MULTI;
}
if (EnParser.partOfSpeechHeader.matcher(sectionName).matches()) {
// We need to put it in the other index, too (probably)
return null;
}
if (sectionName.equalsIgnoreCase("Derived Terms")) {
return null;
}
return null;
}
@Override
public boolean skipWikiLink(WikiTokenizer wikiTokenizer) {
final String wikiText = wikiTokenizer.wikiLinkText();
if (wikiText.startsWith("Category:")) {
return true;
}
return false;
}
@Override
public String adjustWikiLink(String wikiLinkDest, String wikiLinkText) {
if (wikiLinkDest.startsWith("w:") || wikiLinkDest.startsWith("Image:")) {
return null;
}
final int hashPos = wikiLinkDest.indexOf("#");
if (hashPos != -1) {
wikiLinkDest = wikiLinkDest.substring(0, hashPos);
if (wikiLinkDest.isEmpty()) {
wikiLinkDest = wikiLinkText;
}
}
return wikiLinkDest;
}
@Override
public void addFunctionCallbacks(
Map<String, FunctionCallback<WholeSectionToHtmlParser>> functionCallbacks) {
EnFunctionCallbacks.addGenericCallbacks(functionCallbacks);
}
});
final Pattern esSkipSections = Pattern.compile(".*(Traducciones|Locuciones).*");
isoToLangConfig.put("ES", new LangConfig() {
@Override
public boolean skipSection(String headingText) {
return esSkipSections.matcher(headingText).matches();
}
@Override
public EntryTypeName sectionNameToEntryType(String sectionName) {
if (sectionName.equalsIgnoreCase("sinónimo") || sectionName.equalsIgnoreCase("sinónimos")) {
return EntryTypeName.SYNONYM_MULTI;
}
if (sectionName.equalsIgnoreCase("antónimo") || sectionName.equalsIgnoreCase("antónimos")) {
return EntryTypeName.ANTONYM_MULTI;
}
return null;
}
@Override
public boolean skipWikiLink(WikiTokenizer wikiTokenizer) {
final String wikiText = wikiTokenizer.wikiLinkText();
if (wikiText.startsWith("Categoría:")) {
return true;
}
return false;
}
@Override
public String adjustWikiLink(String wikiLinkDest, String wikiLinkText) {
if (wikiLinkDest.startsWith("w:") || wikiLinkDest.startsWith("Image:")) {
return null;
}
final int hashPos = wikiLinkDest.indexOf("#");
if (hashPos != -1) {
wikiLinkDest = wikiLinkDest.substring(0, hashPos);
if (wikiLinkDest.isEmpty()) {
wikiLinkDest = wikiLinkText;
}
}
return wikiLinkDest;
}
@Override
public void addFunctionCallbacks(
Map<String, FunctionCallback<WholeSectionToHtmlParser>> functionCallbacks) {
// TODO: need Spanish variant
}
});
final Pattern deSkipSections = Pattern.compile(".*(Übersetzungen|Referenzen|Quellen).*");
isoToLangConfig.put("DE", new LangConfig() {
@Override
public boolean skipSection(String headingText) {
return deSkipSections.matcher(headingText).matches();
}
@Override
public EntryTypeName sectionNameToEntryType(String sectionName) {
if (sectionName.equalsIgnoreCase("Synonyme")) {
return EntryTypeName.SYNONYM_MULTI;
}
if (sectionName.equalsIgnoreCase("Gegenwörter")) {
return EntryTypeName.ANTONYM_MULTI;
}
return null;
}
@Override
public boolean skipWikiLink(WikiTokenizer wikiTokenizer) {
final String wikiText = wikiTokenizer.wikiLinkText();
if (wikiText.startsWith("Kategorie:")) {
return true;
}
return false;
}
@Override
public String adjustWikiLink(String wikiLinkDest, String wikiLinkText) {
if (wikiLinkDest.startsWith("w:") || wikiLinkDest.startsWith("Image:")) {
return null;
}
final int hashPos = wikiLinkDest.indexOf("#");
if (hashPos != -1) {
wikiLinkDest = wikiLinkDest.substring(0, hashPos);
if (wikiLinkDest.isEmpty()) {
wikiLinkDest = wikiLinkText;
}
}
return wikiLinkDest;
}
@Override
public void addFunctionCallbacks(
Map<String, FunctionCallback<WholeSectionToHtmlParser>> functionCallbacks) {
DeFunctionCallbacks.addGenericCallbacks(functionCallbacks);
}
});
final Pattern itSkipSections = Pattern.compile(".*(Traduzione|Note / Riferimenti).*");
isoToLangConfig.put("IT", new LangConfig() {
@Override
public boolean skipSection(String headingText) {
return itSkipSections.matcher(headingText).matches();
}
@Override
public EntryTypeName sectionNameToEntryType(String sectionName) {
if (sectionName.equalsIgnoreCase("Sinonimi")) {
return EntryTypeName.SYNONYM_MULTI;
}
if (sectionName.equalsIgnoreCase("Antonimi/Contrari")) {
return EntryTypeName.ANTONYM_MULTI;
}
return null;
}
@Override
public boolean skipWikiLink(WikiTokenizer wikiTokenizer) {
final String wikiText = wikiTokenizer.wikiLinkText();
if (wikiText.startsWith("Categoria:")) {
return true;
}
return false;
}
@Override
public String adjustWikiLink(String wikiLinkDest, String wikiLinkText) {
if (wikiLinkDest.startsWith("w:") || wikiLinkDest.startsWith("Image:")) {
return null;
}
final int hashPos = wikiLinkDest.indexOf("#");
if (hashPos != -1) {
wikiLinkDest = wikiLinkDest.substring(0, hashPos);
if (wikiLinkDest.isEmpty()) {
wikiLinkDest = wikiLinkText;
}
}
return wikiLinkDest;
}
@Override
public void addFunctionCallbacks(
Map<String, FunctionCallback<WholeSectionToHtmlParser>> functionCallbacks) {
ItFunctionCallbacks.addGenericCallbacks(functionCallbacks);
}
});
final Pattern frSkipSections = Pattern.compile(".*([Tt]raductions|[Aa]nagrammes).*");
isoToLangConfig.put("FR", new LangConfig() {
@Override
public boolean skipSection(String headingText) {
return frSkipSections.matcher(headingText).matches();
}
@Override
public EntryTypeName sectionNameToEntryType(String sectionName) {
if (sectionName.equalsIgnoreCase("Synonymes")) {
return EntryTypeName.SYNONYM_MULTI;
}
if (sectionName.equalsIgnoreCase("Antonymes")) {
return EntryTypeName.ANTONYM_MULTI;
}
return null;
}
@Override
public boolean skipWikiLink(WikiTokenizer wikiTokenizer) {
final String wikiText = wikiTokenizer.wikiLinkText();
if (wikiText.startsWith("Catégorie:")) {
return true;
}
return false;
}
@Override
public String adjustWikiLink(String wikiLinkDest, String wikiLinkText) {
if (wikiLinkDest.startsWith("w:") || wikiLinkDest.startsWith("Image:")) {
return null;
}
final int hashPos = wikiLinkDest.indexOf("#");
if (hashPos != -1) {
wikiLinkDest = wikiLinkDest.substring(0, hashPos);
if (wikiLinkDest.isEmpty()) {
wikiLinkDest = wikiLinkText;
}
}
return wikiLinkDest;
}
@Override
public void addFunctionCallbacks(
Map<String, FunctionCallback<WholeSectionToHtmlParser>> functionCallbacks) {
FrFunctionCallbacks.addGenericCallbacks(functionCallbacks);
}
});
}
final IndexBuilder titleIndexBuilder;
final IndexBuilder defIndexBuilder;
final String skipLangIso;
final LangConfig langConfig;
final String webUrlTemplate;
public WholeSectionToHtmlParser(final IndexBuilder titleIndexBuilder, final IndexBuilder defIndexBuilder, final String wiktionaryIso, final String skipLangIso,
final String webUrlTemplate) {
this.titleIndexBuilder = titleIndexBuilder;
this.defIndexBuilder = defIndexBuilder;
assert isoToLangConfig.containsKey(wiktionaryIso): wiktionaryIso;
this.langConfig = isoToLangConfig.get(wiktionaryIso);
this.skipLangIso = skipLangIso;
this.webUrlTemplate = webUrlTemplate;
}
IndexedEntry indexedEntry = null;
@Override
public void parseSection(String heading, String text) {
assert entrySource != null;
final HtmlEntry htmlEntry = new HtmlEntry(entrySource, title);
indexedEntry = new IndexedEntry(htmlEntry);
final AppendAndIndexWikiCallback<WholeSectionToHtmlParser> callback = new AppendCallback(
this);
langConfig.addFunctionCallbacks(callback.functionCallbacks);
callback.builder = new StringBuilder();
callback.indexedEntry = indexedEntry;
callback.dispatch(text, null);
if (webUrlTemplate != null) {
final String webUrl = String.format(webUrlTemplate, title);
// URI.create can raise an exception e.g. if webUrl contains %, just ignore those cases.
try {
callback.builder.append(String.format("<p> <a href=\"%s\">%s</a>", URI.create(webUrl).toASCIIString(), escapeHtmlLiteral(webUrl)));
} catch (Exception e) {
}
}
htmlEntry.html = callback.builder.toString();
indexedEntry.isValid = true;
final TokenData tokenData = titleIndexBuilder.getOrCreateTokenData(title);
tokenData.hasMainEntry = true;
htmlEntry.addToDictionary(titleIndexBuilder.index.dict);
tokenData.htmlEntries.add(htmlEntry);
// titleIndexBuilder.addEntryWithString(indexedEntry, title,
// EntryTypeName.WIKTIONARY_TITLE_MULTI_DETAIL);
indexedEntry = null;
}
@Override
void removeUselessArgs(Map<String, String> namedArgs) {
}
@Override
public void addLinkToCurrentEntry(String token, final String lang, EntryTypeName entryTypeName) {
if (lang == null || lang.equals(skipLangIso)) {
titleIndexBuilder.addEntryWithString(indexedEntry, token, entryTypeName);
}
}
public static String escapeHtmlLiteral(final String plainText) {
final String htmlEscaped = StringEscapeUtils.escapeHtml3(plainText);
if (StringUtil.isAscii(htmlEscaped)) {
return htmlEscaped;
} else {
return StringUtil.escapeUnicodeToPureHtml(plainText);
}
}
class AppendCallback extends AppendAndIndexWikiCallback<WholeSectionToHtmlParser> {
public AppendCallback(WholeSectionToHtmlParser parser) {
super(parser);
}
@Override
public void onPlainText(String plainText) {
super.onPlainText(escapeHtmlLiteral(plainText));
}
@Override
public void onWikiLink(WikiTokenizer wikiTokenizer) {
if (wikiTokenizer.wikiLinkText().endsWith(":" + title)) {
// Skips wikilinks like: [[en::dick]]
return;
}
if (langConfig.skipWikiLink(wikiTokenizer)) {
return;
}
String linkDest;
if (wikiTokenizer.wikiLinkDest() != null) {
linkDest = langConfig.adjustWikiLink(wikiTokenizer.wikiLinkDest(), wikiTokenizer.wikiLinkText());
} else {
linkDest = wikiTokenizer.wikiLinkText();
}
if (sectionEntryTypeName != null) {
// TODO: inside a definition, this could be the wrong language.
titleIndexBuilder.addEntryWithString(indexedEntry, wikiTokenizer.wikiLinkText(), sectionEntryTypeName);
}
if (!StringUtil.isNullOrEmpty(linkDest)) {
builder.append(String.format("<a href=\"%s\">", HtmlEntry.formatQuickdicUrl("", linkDest)));
super.onWikiLink(wikiTokenizer);
builder.append(String.format("</a>"));
} else {
super.onWikiLink(wikiTokenizer);
}
}
@Override
public void onFunction(WikiTokenizer wikiTokenizer, String name,
List<String> args, Map<String, String> namedArgs) {
if (skipLangIso.equalsIgnoreCase(namedArgs.get("lang"))) {
namedArgs.remove("lang");
}
super.onFunction(wikiTokenizer, name, args, namedArgs);
}
@Override
public void onHtml(WikiTokenizer wikiTokenizer) {
super.onHtml(wikiTokenizer);
}
@Override
public void onNewline(WikiTokenizer wikiTokenizer) {
}
EntryTypeName sectionEntryTypeName;
IndexBuilder currentIndexBuilder;
@Override
public void onHeading(WikiTokenizer wikiTokenizer) {
final String headingText = wikiTokenizer.headingWikiText();
sectionEntryTypeName = langConfig.sectionNameToEntryType(headingText);
final int depth = wikiTokenizer.headingDepth();
if (langConfig.skipSection(headingText)) {
//System.out.println("Skipping section:" + headingText);
while ((wikiTokenizer = wikiTokenizer.nextToken()) != null) {
if (wikiTokenizer.isHeading() && wikiTokenizer.headingDepth() <= depth) {
// System.out.println("Resume on: " + wikiTokenizer.token());
wikiTokenizer.returnToLineStart();
return;
} else {
// System.out.println("Skipped: " + wikiTokenizer.token());
}
}
return;
}
builder.append(String.format("\n<h%d>", depth));
dispatch(headingText, null);
builder.append(String.format("</h%d>\n", depth));
}
final List<Character> listPrefixStack = new ArrayList<Character>();
@Override
public void onListItem(WikiTokenizer wikiTokenizer) {
if (builder.length() != 0 && builder.charAt(builder.length() - 1) != '\n') {
builder.append("\n");
}
final String prefix = wikiTokenizer.listItemPrefix();
while (listPrefixStack.size() < prefix.length()) {
builder.append(String.format("<%s>",
WikiTokenizer.getListTag(prefix.charAt(listPrefixStack.size()))));
listPrefixStack.add(prefix.charAt(listPrefixStack.size()));
}
builder.append("<li>");
dispatch(wikiTokenizer.listItemWikiText(), null);
builder.append("</li>\n");
WikiTokenizer nextToken = wikiTokenizer.nextToken();
boolean returnToLineStart = false;
if (nextToken != null && nextToken.isNewline()) {
nextToken = nextToken.nextToken();
returnToLineStart = true;
}
final String nextListHeader;
if (nextToken == null || !nextToken.isListItem()) {
nextListHeader = "";
} else {
nextListHeader = nextToken.listItemPrefix();
}
if (returnToLineStart) {
wikiTokenizer.returnToLineStart();
}
while (listPrefixStack.size() > nextListHeader.length()) {
final char prefixChar = listPrefixStack.remove(listPrefixStack.size() - 1);
builder.append(String.format("</%s>\n", WikiTokenizer.getListTag(prefixChar)));
}
}
boolean boldOn = false;
boolean italicOn = false;
@Override
public void onMarkup(WikiTokenizer wikiTokenizer) {
if ("'''".equals(wikiTokenizer.token())) {
if (!boldOn) {
builder.append("<b>");
} else {
builder.append("</b>");
}
boldOn = !boldOn;
} else if ("''".equals(wikiTokenizer.token())) {
if (!italicOn) {
builder.append("<em>");
} else {
builder.append("</em>");
}
italicOn = !italicOn;
} else {
assert false;
}
}
}
}
| Fix crash in dictionary generation for PT input.
| src/com/hughes/android/dictionary/parser/wiktionary/WholeSectionToHtmlParser.java | Fix crash in dictionary generation for PT input. | <ide><path>rc/com/hughes/android/dictionary/parser/wiktionary/WholeSectionToHtmlParser.java
<ide> public void addFunctionCallbacks(
<ide> Map<String, FunctionCallback<WholeSectionToHtmlParser>> functionCallbacks) {
<ide> // TODO: need Spanish variant
<add> }
<add> });
<add>
<add> final Pattern ptSkipSections = Pattern.compile(".*Tradução.*");
<add> isoToLangConfig.put("PT", new LangConfig() {
<add> @Override
<add> public boolean skipSection(String headingText) {
<add> return esSkipSections.matcher(headingText).matches();
<add> }
<add>
<add> @Override
<add> public EntryTypeName sectionNameToEntryType(String sectionName) {
<add> if (sectionName.equalsIgnoreCase("Sinônimo") || sectionName.equalsIgnoreCase("Sinônimos")) {
<add> return EntryTypeName.SYNONYM_MULTI;
<add> }
<add> if (sectionName.equalsIgnoreCase("Antônimo") || sectionName.equalsIgnoreCase("Antônimos")) {
<add> return EntryTypeName.ANTONYM_MULTI;
<add> }
<add> return null;
<add> }
<add>
<add> @Override
<add> public boolean skipWikiLink(WikiTokenizer wikiTokenizer) {
<add> final String wikiText = wikiTokenizer.wikiLinkText();
<add> if (wikiText.startsWith("Categoría:")) {
<add> return true;
<add> }
<add> return false;
<add> }
<add> @Override
<add> public String adjustWikiLink(String wikiLinkDest, String wikiLinkText) {
<add> if (wikiLinkDest.startsWith("w:") || wikiLinkDest.startsWith("Image:")) {
<add> return null;
<add> }
<add> final int hashPos = wikiLinkDest.indexOf("#");
<add> if (hashPos != -1) {
<add> wikiLinkDest = wikiLinkDest.substring(0, hashPos);
<add> if (wikiLinkDest.isEmpty()) {
<add> wikiLinkDest = wikiLinkText;
<add> }
<add> }
<add> return wikiLinkDest;
<add> }
<add>
<add> @Override
<add> public void addFunctionCallbacks(
<add> Map<String, FunctionCallback<WholeSectionToHtmlParser>> functionCallbacks) {
<add> // TODO: need Portuguese variant
<ide> }
<ide> });
<ide> |
|
Java | apache-2.0 | e54fc8e665811a570f443030de46f7553bfda819 | 0 | Dakror/Arise,Dakror/Arise | package de.dakror.arise.net;
import java.io.File;
import java.net.BindException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.SocketException;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.concurrent.CopyOnWriteArrayList;
import de.dakror.arise.AriseServer;
import de.dakror.arise.game.Game;
import de.dakror.arise.net.packet.Packet;
import de.dakror.arise.net.packet.Packet.PacketTypes;
import de.dakror.arise.net.packet.Packet00Handshake;
import de.dakror.arise.net.packet.Packet01Login;
import de.dakror.arise.net.packet.Packet01Login.Response;
import de.dakror.arise.net.packet.Packet02Disconnect;
import de.dakror.arise.net.packet.Packet02Disconnect.Cause;
import de.dakror.arise.net.packet.Packet03World;
import de.dakror.arise.net.packet.Packet04City;
import de.dakror.arise.net.packet.Packet05Resources;
import de.dakror.arise.net.packet.Packet06Building;
import de.dakror.arise.net.packet.Packet07RenameCity;
import de.dakror.arise.net.packet.Packet08PlaceBuilding;
import de.dakror.arise.net.packet.Packet09BuildingStageChange;
import de.dakror.arise.net.packet.Packet10Attribute;
import de.dakror.arise.net.packet.Packet11DeconstructBuilding;
import de.dakror.arise.server.DBManager;
import de.dakror.arise.server.ServerUpdater;
import de.dakror.arise.settings.CFG;
import de.dakror.gamesetup.util.Helper;
/**
* @author Dakror
*/
public class Server extends Thread
{
public static Server currentServer;
public static final int PORT = 14744;
public static final int PACKETSIZE = 255; // bytes
public static File dir;
public boolean running;
public CopyOnWriteArrayList<User> clients = new CopyOnWriteArrayList<>();
ServerUpdater updater;
DatagramSocket socket;
public Server(InetAddress ip)
{
currentServer = this;
try
{
dir = new File(CFG.DIR, "Server");
dir.mkdir();
socket = new DatagramSocket(new InetSocketAddress(ip, Server.PORT));
setName("Server-Thread");
setPriority(MAX_PRIORITY);
out("Connecting to database");
DBManager.init();
updater = new ServerUpdater();
out("Fetching configuration");
Game.loadConfig();
out("Starting server at " + socket.getLocalAddress().getHostAddress() + ":" + socket.getLocalPort());
start();
}
catch (BindException e)
{
err("There is a server already running on this machine!");
}
catch (SocketException e)
{
e.printStackTrace();
}
}
@Override
public void run()
{
running = true;
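		// Receive loop: block for the next datagram and dispatch it by the sender's address and port.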
while (running)
{
byte[] data = new byte[PACKETSIZE];
DatagramPacket packet = new DatagramPacket(data, data.length);
try
{
socket.receive(packet);
parsePacket(data, packet.getAddress(), packet.getPort());
}
catch (SocketException e)
{}
catch (Exception e)
{
e.printStackTrace();
}
}
}
public void parsePacket(byte[] data, InetAddress address, int port)
{
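		// The first byte of the datagram identifies the packet type.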
PacketTypes type = Packet.lookupPacket(data[0]);
User user = getUserForIP(address, port);
if (user != null) user.interact();
AriseServer.trafficLog.setText(AriseServer.trafficLog.getText() + new SimpleDateFormat("'['HH:mm:ss']: '").format(new Date()) + "< " + address.getHostAddress() + ":" + port + " " + type.name() + "\n");
AriseServer.trafficLog.setCaretPosition(AriseServer.trafficLog.getDocument().getLength());
switch (type)
{
case INVALID:
{
err("Received invalid packet: " + new String(data));
break;
}
case HANDSHAKE:
{
try
{
sendPacket(new Packet00Handshake(), user == null ? new User(0, 0, address, port) : user);
if (user == null) out("Shook hands with: " + address.getHostAddress() + ":" + port);
break;
}
catch (Exception e)
{
e.printStackTrace();
}
}
case LOGIN:
{
try
{
Packet01Login p = new Packet01Login(data);
String s = Helper.getURLContent(new URL("http://dakror.de/mp-api/login_noip.php?username=" + p.getUsername() + "&password=" + p.getPwdMd5()));
boolean loggedIn = s.contains("true");
boolean worldExists = DBManager.getWorldForId(p.getWorldId()).getId() != -1;
if (loggedIn && worldExists)
{
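					// The login reply is colon-separated; parts[1] holds the numeric user id and parts[2] the user name.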
String[] parts = s.split(":");
User u = new User(Integer.parseInt(parts[1].trim()), p.getWorldId(), address, port);
boolean alreadyLoggedIn = getUserForId(u.getId()) != null;
if (alreadyLoggedIn)
{
out("Refused login of " + address.getHostAddress() + ":" + port + " (" + Response.ALREADY_LOGGED_IN.name() + ")");
sendPacket(new Packet01Login(p.getUsername(), 0, p.getWorldId(), Response.ALREADY_LOGGED_IN), u);
}
else
{
out("User " + parts[2].trim() + " (#" + u.getId() + ")" + " logged in on world #" + p.getWorldId() + ".");
sendPacket(new Packet01Login(parts[2], u.getId(), p.getWorldId(), Response.LOGIN_OK), u);
clients.add(u);
}
}
else
{
out("Refused login of " + address.getHostAddress() + ":" + port + " (" + (!loggedIn ? Response.BAD_LOGIN : Response.BAD_WORLD_ID).name() + ")");
sendPacket(new Packet01Login(p.getUsername(), 0, p.getWorldId(), !loggedIn ? Response.BAD_LOGIN : Response.BAD_WORLD_ID), new User(0, 0, address, port));
}
}
catch (Exception e)
{
e.printStackTrace();
}
break;
}
case DISCONNECT:
{
Packet02Disconnect p = new Packet02Disconnect(data);
for (User u : clients)
{
if (u.getId() == p.getUserId() && address.equals(u.getIP()))
{
try
{
sendPacket(new Packet02Disconnect(0, Cause.SERVER_CONFIRMED), u);
out("User disconnected: #" + u.getId() + " (" + p.getCause().name() + ")");
clients.remove(u);
}
catch (Exception e)
{
e.printStackTrace();
}
}
}
break;
}
case WORLD:
{
try
{
Packet03World p = new Packet03World(data);
boolean spawn = DBManager.spawnPlayer(p.getId(), user);
out("Player's first visit on world? " + spawn);
sendPacket(DBManager.getWorldForId(p.getId()), user);
sendPacketToAllClientsExceptOne(DBManager.getSpawnCity(p.getId(), user.getId()), user);
break;
}
catch (Exception e)
{
e.printStackTrace();
}
}
case CITY:
{
Packet04City p = new Packet04City(data);
try
{
for (Packet04City packet : DBManager.getCities(p.getWorldId()))
sendPacket(packet, user);
}
catch (Exception e)
{
e.printStackTrace();
}
break;
}
case RESOURCES:
{
try
{
Packet05Resources p = new Packet05Resources(data);
if (DBManager.isCityFromUser(p.getCityId(), user)) sendPacket(new Packet05Resources(p.getCityId(), DBManager.getCityResources(p.getCityId())), user);
break;
}
catch (Exception e)
{
e.printStackTrace();
}
}
case BUILDING:
{
Packet06Building p = new Packet06Building(data);
if (p.getBuildingType() == 0 && DBManager.isCityFromUser(p.getCityId(), user))
{
try
{
for (Packet06Building packet : DBManager.getCityBuildings(p.getCityId()))
sendPacket(packet, user);
}
catch (Exception e)
{
e.printStackTrace();
}
}
break;
}
case RENAMECITY:
{
Packet07RenameCity p = new Packet07RenameCity(data);
if (DBManager.isCityFromUser(p.getCityId(), user))
{
boolean worked = DBManager.renameCity(p.getCityId(), p.getNewName(), user);
try
{
sendPacket(new Packet07RenameCity(p.getCityId(), worked ? p.getNewName() : "#false#"), getUserForIP(address, port));
}
catch (Exception e)
{
e.printStackTrace();
}
}
break;
}
case PLACEBUILDING:
{
Packet08PlaceBuilding p = new Packet08PlaceBuilding(data);
if (DBManager.isCityFromUser(p.getCityId(), user))
{
int id = DBManager.placeBuilding(p.getCityId(), p.getBuildingType(), p.getX(), p.getY());
if (id != 0)
{
try
{
sendPacket(DBManager.getCityBuilding(p.getCityId(), id), user);
sendPacket(new Packet05Resources(p.getCityId(), DBManager.getCityResources(p.getCityId())), user);
}
catch (Exception e)
{
e.printStackTrace();
}
}
}
break;
}
case ATTRIBUTE:
{
Packet10Attribute p = new Packet10Attribute(data);
if (user != null)
{
if (p.getKey().equals("city")) user.setCity(Integer.parseInt(p.getValue()));
}
break;
}
case DECONSTRUCTBUILDING:
{
Packet11DeconstructBuilding p = new Packet11DeconstructBuilding(data);
if (DBManager.isCityFromUser(p.getCityId(), user))
{
int timeleft = 0;
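				// A non-negative return value is the time left on the deconstruction; a negative value means the request was not accepted.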
if ((timeleft = DBManager.deconstructBuilding(p.getCityId(), p.getBuildingId())) > -1)
{
try
{
sendPacket(new Packet09BuildingStageChange(p.getBuildingId(), p.getCityId(), 2, timeleft), user);
sendPacket(new Packet05Resources(p.getCityId(), DBManager.getCityResources(p.getCityId())), user);
}
catch (Exception e)
{
e.printStackTrace();
}
}
}
break;
}
default:
err("Received unhandled packet (" + address.getHostAddress() + ":" + port + ") " + type + " [" + Packet.readData(data) + "]");
}
}
public void sendPacketToAllClients(Packet p) throws Exception
{
for (User u : clients)
sendPacket(p, u);
}
public void sendPacketToAllClientsExceptOne(Packet p, User exception) throws Exception
{
for (User u : clients)
{
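			// An id of 0 marks a user that has not logged in yet, so match by address and port instead of by id.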
if (exception.getId() == 0)
{
if (exception.getIP().equals(u.getIP()) && exception.getPort() == u.getPort()) continue;
}
else if (exception.getId() == u.getId()) continue;
sendPacket(p, u);
}
}
public void sendPacket(Packet p, User u) throws Exception
{
if (u == null) throw new NullPointerException("user = null");
byte[] data = p.getData();
DatagramPacket packet = new DatagramPacket(data, data.length, u.getIP(), u.getPort());
socket.send(packet);
AriseServer.trafficLog.setText(AriseServer.trafficLog.getText() + new SimpleDateFormat("'['HH:mm:ss']: '").format(new Date()) + "> " + u.getIP().getHostAddress() + ":" + u.getPort() + " " + p.getType().name() + "\n");
AriseServer.trafficLog.setCaretPosition(AriseServer.trafficLog.getDocument().getLength());
}
public User getUserForIP(InetAddress address, int port)
{
for (User u : clients)
if (u.getIP().equals(address) && u.getPort() == port) return u;
return null;
}
public User getUserForId(int id)
{
for (User u : clients)
if (u.getId() == id) return u;
return null;
}
public void shutdown()
{
try
{
sendPacketToAllClients(new Packet02Disconnect(0, Packet02Disconnect.Cause.SERVER_CLOSED));
}
catch (Exception e)
{
e.printStackTrace();
}
running = false;
socket.close();
}
public static void out(Object... p)
{
String timestamp = new SimpleDateFormat("'['HH:mm:ss']: '").format(new Date());
if (p.length == 1) System.out.println(timestamp + p[0]);
else System.out.println(timestamp + Arrays.toString(p));
}
public static void err(Object... p)
{
String timestamp = new SimpleDateFormat("'['HH:mm:ss']: '").format(new Date());
if (p.length == 1) System.err.println(timestamp + p[0]);
else System.err.println(timestamp + Arrays.toString(p));
}
}
| src/main/java/de/dakror/arise/net/Server.java | package de.dakror.arise.net;
import java.io.File;
import java.net.BindException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.SocketException;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.concurrent.CopyOnWriteArrayList;
import de.dakror.arise.AriseServer;
import de.dakror.arise.game.Game;
import de.dakror.arise.net.packet.Packet;
import de.dakror.arise.net.packet.Packet.PacketTypes;
import de.dakror.arise.net.packet.Packet00Handshake;
import de.dakror.arise.net.packet.Packet01Login;
import de.dakror.arise.net.packet.Packet01Login.Response;
import de.dakror.arise.net.packet.Packet02Disconnect;
import de.dakror.arise.net.packet.Packet02Disconnect.Cause;
import de.dakror.arise.net.packet.Packet03World;
import de.dakror.arise.net.packet.Packet04City;
import de.dakror.arise.net.packet.Packet05Resources;
import de.dakror.arise.net.packet.Packet06Building;
import de.dakror.arise.net.packet.Packet07RenameCity;
import de.dakror.arise.net.packet.Packet08PlaceBuilding;
import de.dakror.arise.net.packet.Packet09BuildingStageChange;
import de.dakror.arise.net.packet.Packet10Attribute;
import de.dakror.arise.net.packet.Packet11DeconstructBuilding;
import de.dakror.arise.server.DBManager;
import de.dakror.arise.server.ServerUpdater;
import de.dakror.arise.settings.CFG;
import de.dakror.gamesetup.util.Helper;
/**
* @author Dakror
*/
public class Server extends Thread
{
public static Server currentServer;
public static final int PORT = 14744;
public static final int PACKETSIZE = 255; // bytes
public static File dir;
public boolean running;
public CopyOnWriteArrayList<User> clients = new CopyOnWriteArrayList<>();
ServerUpdater updater;
DatagramSocket socket;
public Server(InetAddress ip)
{
currentServer = this;
try
{
dir = new File(CFG.DIR, "Server");
dir.mkdir();
socket = new DatagramSocket(new InetSocketAddress(ip, Server.PORT));
setName("Server-Thread");
setPriority(MAX_PRIORITY);
out("Connecting to database");
DBManager.init();
updater = new ServerUpdater();
out("Fetching configuration");
Game.loadConfig();
out("Starting server at " + socket.getLocalAddress().getHostAddress() + ":" + socket.getLocalPort());
start();
}
catch (BindException e)
{
err("There is a server already running on this machine!");
}
catch (SocketException e)
{
e.printStackTrace();
}
}
@Override
public void run()
{
running = true;
while (running)
{
byte[] data = new byte[PACKETSIZE];
DatagramPacket packet = new DatagramPacket(data, data.length);
try
{
socket.receive(packet);
parsePacket(data, packet.getAddress(), packet.getPort());
}
catch (SocketException e)
{}
catch (Exception e)
{
e.printStackTrace();
}
}
}
public void parsePacket(byte[] data, InetAddress address, int port)
{
PacketTypes type = Packet.lookupPacket(data[0]);
User user = getUserForIP(address, port);
if (user != null) user.interact();
AriseServer.trafficLog.setText(AriseServer.trafficLog.getText() + new SimpleDateFormat("'['HH:mm:ss']: '").format(new Date()) + "< " + address.getHostAddress() + ":" + port + " " + type.name() + "\n");
AriseServer.trafficLog.setCaretPosition(AriseServer.trafficLog.getDocument().getLength());
switch (type)
{
case INVALID:
{
err("Received invalid packet: " + new String(data));
break;
}
case HANDSHAKE:
{
try
{
sendPacket(new Packet00Handshake(), user == null ? new User(0, 0, address, port) : user);
if (user == null) out("Shook hands with: " + address.getHostAddress() + ":" + port);
break;
}
catch (Exception e)
{
e.printStackTrace();
}
}
case LOGIN:
{
try
{
Packet01Login p = new Packet01Login(data);
String s = Helper.getURLContent(new URL("http://dakror.de/mp-api/login_noip.php?username=" + p.getUsername() + "&password=" + p.getPwdMd5()));
boolean loggedIn = s.contains("true");
boolean worldExists = DBManager.getWorldForId(p.getWorldId()).getId() != -1;
if (loggedIn && worldExists)
{
String[] parts = s.split(":");
User u = new User(Integer.parseInt(parts[1].trim()), p.getWorldId(), address, port);
boolean alreadyLoggedIn = getUserForId(u.getId()) != null;
if (alreadyLoggedIn)
{
out("Refused login of " + address.getHostAddress() + ":" + port + " (" + Response.ALREADY_LOGGED_IN.name() + ")");
sendPacket(new Packet01Login(p.getUsername(), 0, p.getWorldId(), Response.ALREADY_LOGGED_IN), u);
}
else
{
out("User " + parts[2].trim() + " (#" + u.getId() + ")" + " logged in on world #" + p.getWorldId() + ".");
sendPacket(new Packet01Login(parts[2], u.getId(), p.getWorldId(), Response.LOGIN_OK), u);
clients.add(u);
}
}
else
{
out("Refused login of " + address.getHostAddress() + ":" + port + " (" + (!loggedIn ? Response.BAD_LOGIN : Response.BAD_WORLD_ID).name() + ")");
sendPacket(new Packet01Login(p.getUsername(), 0, p.getWorldId(), !loggedIn ? Response.BAD_LOGIN : Response.BAD_WORLD_ID), new User(0, 0, address, port));
}
}
catch (Exception e)
{
e.printStackTrace();
}
break;
}
case DISCONNECT:
{
Packet02Disconnect p = new Packet02Disconnect(data);
for (User u : clients)
{
if (u.getId() == p.getUserId() && address.equals(u.getIP()))
{
try
{
sendPacket(new Packet02Disconnect(0, Cause.SERVER_CONFIRMED), u);
out("User disconnected: #" + u.getId() + " (" + p.getCause().name() + ")");
clients.remove(u);
}
catch (Exception e)
{
e.printStackTrace();
}
}
}
break;
}
case WORLD:
{
try
{
Packet03World p = new Packet03World(data);
boolean spawn = DBManager.spawnPlayer(p.getId(), user);
out("Player's first visit on world? " + spawn);
sendPacket(DBManager.getWorldForId(p.getId()), user);
sendPacketToAllClientsExceptOne(DBManager.getSpawnCity(p.getId(), user.getId()), user);
break;
}
catch (Exception e)
{
e.printStackTrace();
}
}
case CITY:
{
Packet04City p = new Packet04City(data);
try
{
for (Packet04City packet : DBManager.getCities(p.getWorldId()))
sendPacket(packet, user);
}
catch (Exception e)
{
e.printStackTrace();
}
break;
}
case RESOURCES:
{
try
{
Packet05Resources p = new Packet05Resources(data);
if (DBManager.isCityFromUser(p.getCityId(), user)) sendPacket(new Packet05Resources(p.getCityId(), DBManager.getCityResources(p.getCityId())), user);
break;
}
catch (Exception e)
{
e.printStackTrace();
}
}
case BUILDING:
{
Packet06Building p = new Packet06Building(data);
if (p.getBuildingType() == 0 && DBManager.isCityFromUser(p.getCityId(), user))
{
try
{
for (Packet06Building packet : DBManager.getCityBuildings(p.getCityId()))
sendPacket(packet, user);
}
catch (Exception e)
{
e.printStackTrace();
}
}
break;
}
case RENAMECITY:
{
Packet07RenameCity p = new Packet07RenameCity(data);
if (DBManager.isCityFromUser(p.getCityId(), user))
{
boolean worked = DBManager.renameCity(p.getCityId(), p.getNewName(), user);
try
{
sendPacket(new Packet07RenameCity(p.getCityId(), worked ? p.getNewName() : "#false#"), getUserForIP(address, port));
}
catch (Exception e)
{
e.printStackTrace();
}
}
break;
}
case PLACEBUILDING:
{
Packet08PlaceBuilding p = new Packet08PlaceBuilding(data);
if (DBManager.isCityFromUser(p.getCityId(), user))
{
int id = DBManager.placeBuilding(p.getCityId(), p.getBuildingType(), p.getX(), p.getY());
if (id != 0)
{
try
{
sendPacket(DBManager.getCityBuilding(p.getCityId(), id), user);
sendPacket(new Packet05Resources(p.getCityId(), DBManager.getCityResources(p.getCityId())), user);
}
catch (Exception e)
{
e.printStackTrace();
}
}
}
break;
}
case ATTRIBUTE:
{
Packet10Attribute p = new Packet10Attribute(data);
if (user != null)
{
if (p.getKey().equals("city")) user.setCity(Integer.parseInt(p.getValue()));
}
break;
}
case DECONSTRUCTBUILDING:
{
Packet11DeconstructBuilding p = new Packet11DeconstructBuilding(data);
if (DBManager.isCityFromUser(p.getCityId(), user))
{
int timeleft = 0;
if ((timeleft = DBManager.deconstructBuilding(p.getCityId(), p.getBuildingId())) > -1)
{
try
{
sendPacket(new Packet09BuildingStageChange(p.getBuildingId(), p.getCityId(), 2, timeleft), user);
}
catch (Exception e)
{
e.printStackTrace();
}
}
}
break;
}
default:
err("Received unhandled packet (" + address.getHostAddress() + ":" + port + ") " + type + " [" + Packet.readData(data) + "]");
}
}
public void sendPacketToAllClients(Packet p) throws Exception
{
for (User u : clients)
sendPacket(p, u);
}
public void sendPacketToAllClientsExceptOne(Packet p, User exception) throws Exception
{
for (User u : clients)
{
if (exception.getId() == 0)
{
if (exception.getIP().equals(u.getIP()) && exception.getPort() == u.getPort()) continue;
}
else if (exception.getId() == u.getId()) continue;
sendPacket(p, u);
}
}
public void sendPacket(Packet p, User u) throws Exception
{
if (u == null) throw new NullPointerException("user = null");
byte[] data = p.getData();
DatagramPacket packet = new DatagramPacket(data, data.length, u.getIP(), u.getPort());
socket.send(packet);
AriseServer.trafficLog.setText(AriseServer.trafficLog.getText() + new SimpleDateFormat("'['HH:mm:ss']: '").format(new Date()) + "> " + u.getIP().getHostAddress() + ":" + u.getPort() + " " + p.getType().name() + "\n");
AriseServer.trafficLog.setCaretPosition(AriseServer.trafficLog.getDocument().getLength());
}
public User getUserForIP(InetAddress address, int port)
{
for (User u : clients)
if (u.getIP().equals(address) && u.getPort() == port) return u;
return null;
}
public User getUserForId(int id)
{
for (User u : clients)
if (u.getId() == id) return u;
return null;
}
public void shutdown()
{
try
{
sendPacketToAllClients(new Packet02Disconnect(0, Packet02Disconnect.Cause.SERVER_CLOSED));
}
catch (Exception e)
{
e.printStackTrace();
}
running = false;
socket.close();
}
public static void out(Object... p)
{
String timestamp = new SimpleDateFormat("'['HH:mm:ss']: '").format(new Date());
if (p.length == 1) System.out.println(timestamp + p[0]);
else System.out.println(timestamp + Arrays.toString(p));
}
public static void err(Object... p)
{
String timestamp = new SimpleDateFormat("'['HH:mm:ss']: '").format(new Date());
if (p.length == 1) System.err.println(timestamp + p[0]);
else System.err.println(timestamp + Arrays.toString(p));
}
}
| bugfix
[ci skip] | src/main/java/de/dakror/arise/net/Server.java | bugfix [ci skip] | <ide><path>rc/main/java/de/dakror/arise/net/Server.java
<ide> try
<ide> {
<ide> sendPacket(new Packet09BuildingStageChange(p.getBuildingId(), p.getCityId(), 2, timeleft), user);
<add> sendPacket(new Packet05Resources(p.getCityId(), DBManager.getCityResources(p.getCityId())), user);
<ide> }
<ide> catch (Exception e)
<ide> { |
|
Java | mit | 8634e4ffc18a0b434238b4ad6b1465855afdb49a | 0 | tndatacommons/android-app,Revenaunt/android-app,tndatacommons/android-grow-app,izzyalonso/android-app | package org.tndata.android.compass.adapter;
import android.content.Context;
import android.support.annotation.NonNull;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.Animation;
import android.view.animation.Transformation;
import android.widget.LinearLayout;
import android.widget.TextView;
import org.tndata.android.compass.R;
import org.tndata.android.compass.model.Action;
import org.tndata.android.compass.model.Behavior;
import org.tndata.android.compass.model.Category;
import org.tndata.android.compass.model.Goal;
import org.tndata.android.compass.model.Trigger;
import org.tndata.android.compass.ui.PriorityItemView;
import org.tndata.android.compass.util.ImageLoader;
import java.util.LinkedList;
/**
* Adapter for the goal list on my priorities. It handles all of its events and animations.
*
* @author Ismael Alonso
* @version 1.0.0
*/
public class MyPrioritiesGoalAdapter extends RecyclerView.Adapter{
//The expansion mode, if set to true, when a view expands the opened one (if any) collapses
private final boolean mSingleExpandedGoalMode;
//Context, category, and listener
private Context mContext;
private Category mCategory;
private OnItemClickListener mListener;
private ViewHolder mClickedHolder;
private boolean[] mExpandedGoals;
private ViewHolder mExpandedGoal;
private int mExpanded;
private int mCollapsing;
/**
* Constructor.
*
* @param context the application context.
* @param category the selected category.
* @param listener the receiver of tap events.
*/
public MyPrioritiesGoalAdapter(@NonNull Context context, @NonNull Category category,
@NonNull OnItemClickListener listener){
mSingleExpandedGoalMode = true;
mContext = context;
mCategory = category;
mListener = listener;
mClickedHolder = null;
mExpandedGoals = new boolean[mCategory.getGoals().size()];
for (int i = 0; i < mExpandedGoals.length; i++){
mExpandedGoals[i] = false;
}
mExpandedGoal = null;
mExpanded = -1;
mCollapsing = -1;
ViewHolder.viewPool = new LinkedList<>();
}
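    // Example wiring (illustrative; the variable names are hypothetical):
    //   recyclerView.setAdapter(new MyPrioritiesGoalAdapter(context, selectedCategory, listener));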
@Override
public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType){
LayoutInflater inflater = LayoutInflater.from(mContext);
View view = inflater.inflate(R.layout.item_my_priorities_goal, parent, false);
return new ViewHolder(view, this);
}
@Override
public void onBindViewHolder(RecyclerView.ViewHolder holder, int position){
((ViewHolder)holder).name.setText(mCategory.getGoals().get(position).getTitle());
}
@Override
public long getItemId(int position){
return position;
}
@Override
public int getItemCount(){
return mCategory.getGoals().size();
}
/**
* Populates the offspring of a goal with behaviors, actions, and triggers.
*
* @param holder the view holder hosting the goal.
* @param position the position of the goal in the backing list.
*/
private void populate(ViewHolder holder, int position){
Goal goal = mCategory.getGoals().get(position);
//For each behavior in the goal
for (Behavior behavior:goal.getBehaviors()){
//A priority item view is retrieved and populated
PriorityItemView behaviorView = getPriorityItemView();
behaviorView.setItemHierarchy(new ItemHierarchy(mCategory, goal, behavior, null));
behaviorView.setLeftPadding(20);
behaviorView.getTextView().setText(behavior.getTitle());
if (behavior.getIconUrl() != null){
ImageLoader.loadBitmap(behaviorView.getImageView(), behavior.getIconUrl(), false);
}
behaviorView.setOnClickListener(holder);
//The view is added to the goal's offspring
holder.offspring.addView(behaviorView);
Log.d("BehaviourActions", behavior.getActions().size() + "");
//For each action in the behavior
for (Action action:behavior.getActions()){
PriorityItemView actionView = getPriorityItemView();
actionView.setItemHierarchy(new ItemHierarchy(mCategory, goal, behavior, action));
actionView.setLeftPadding(40);
actionView.getTextView().setText(action.getTitle());
if (action.getIconUrl() != null){
ImageLoader.loadBitmap(actionView.getImageView(), action.getIconUrl(), false);
}
actionView.setOnClickListener(holder);
holder.offspring.addView(actionView);
Trigger trigger = action.getCustomTrigger();
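                // A custom trigger is shown as an extra indented row summarizing its recurrence, date and time.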
if (trigger != null){
PriorityItemView triggerView = getPriorityItemView();
triggerView.setItemHierarchy(new ItemHierarchy(mCategory, goal, behavior, action));
triggerView.setLeftPadding(65);
String triggerText = trigger.getRecurrencesDisplay();
String date = trigger.getFormattedDate();
if (!date.equals("")){
triggerText += " " + date;
}
String triggerDate = trigger.getFormattedTime();
if (!triggerDate.equals("")){
triggerText += " " + triggerDate;
}
if (!triggerText.equals("")){
triggerView.getTextView().setText(triggerText);
triggerView.getImageView().setVisibility(View.GONE);
triggerView.setOnClickListener(holder);
holder.offspring.addView(triggerView);
}
else{
ViewHolder.recycleView(triggerView);
}
}
}
}
        //Add the "Add behaviors" view at the end of the goal's offspring
PriorityItemView addBehaviors = getPriorityItemView();
addBehaviors.setItemHierarchy(new ItemHierarchy(mCategory, goal, null, null));
addBehaviors.setLeftPadding(0);
addBehaviors.getTextView().setText("Add behaviors");
addBehaviors.getImageView().setVisibility(View.GONE);
addBehaviors.setOnClickListener(holder);
holder.offspring.addView(addBehaviors);
}
/**
* Recycles the offspring of a goal.
*
* @param holder the view holder hosting the goal.
*/
private void recycle(ViewHolder holder){
//Add all the views to the recycled queue and clear the offspring.
for (int i = 0; i < holder.offspring.getChildCount(); i++){
ViewHolder.recycleView((PriorityItemView)holder.offspring.getChildAt(i));
}
holder.offspring.removeAllViews();
Log.d("MyPriorities", ViewHolder.viewPool.size() + " views in the recycled queue");
}
/**
* Expands a goal.
*
* @param holder the view holder hosting the goal.
* @param position the position of the goal in the backing array.
*/
private void expand(ViewHolder holder, int position){
//The position is marked as expanded
mExpanded = position;
//Populate only if the view is not collapsing. Collapsing does not recycle until
// it is done, making it necessary to do this check.
if (mCollapsing != position){
populate(holder, position);
}
holder.offspring.setVisibility(View.VISIBLE);
final View view = holder.offspring;
view.measure(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.WRAP_CONTENT);
final int targetHeight = view.getMeasuredHeight();
view.getLayoutParams().height = 0;
view.setVisibility(View.VISIBLE);
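        // Grow the offspring container from zero to its measured height; the final frame restores WRAP_CONTENT.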
Animation animation = new Animation(){
@Override
protected void applyTransformation(float interpolatedTime, Transformation t){
view.getLayoutParams().height = (interpolatedTime == 1)
? LinearLayout.LayoutParams.WRAP_CONTENT
: (int)(targetHeight * interpolatedTime);
view.requestLayout();
}
@Override
public boolean willChangeBounds(){
return true;
}
};
//1dp/ms
int length = (int)(targetHeight/view.getContext().getResources().getDisplayMetrics().density);
animation.setDuration(length);
view.startAnimation(animation);
}
/**
* Returns a free PriorityItemView.
*
* @return a PriorityItemView ready to be populated.
*/
private PriorityItemView getPriorityItemView(){
//If the recycle queue is empty it creates a new one, otherwise, returns the first one
if (ViewHolder.viewPool.isEmpty()){
return new PriorityItemView(mContext);
}
else{
return ViewHolder.viewPool.removeFirst();
}
}
/**
* Collapses a goal.
*
* @param holder the view holder hosting the goal.
* @param position the position of the goal in the backing array.
*/
private void collapse(final ViewHolder holder, int position){
mCollapsing = position;
final ViewGroup view = holder.offspring;
final int initialHeight = view.getMeasuredHeight();
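        // Shrink the offspring container from its current height to zero, then hide it and recycle its child views.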
Animation animation = new Animation(){
@Override
protected void applyTransformation(float interpolatedTime, Transformation t){
if(interpolatedTime == 1){
if (mCollapsing != -1){
mCollapsing = -1;
view.setVisibility(View.GONE);
recycle(holder);
}
}
else{
view.getLayoutParams().height = initialHeight-(int)(initialHeight*interpolatedTime);
view.requestLayout();
}
}
@Override
public boolean willChangeBounds(){
return true;
}
};
//1dp/ms
int length = (int)(initialHeight/view.getContext().getResources().getDisplayMetrics().density);
animation.setDuration(length);
view.startAnimation(animation);
}
/**
* Handles click events on goals.
*
* @param holder the holder hosting the clicked goal.
* @param position the position of the clicked goal in the backing array.
*/
public void onItemClick(ViewHolder holder, int position){
if (mSingleExpandedGoalMode){
//If there is an expanded goal, collapse it
if (mExpandedGoal != null){
collapse(mExpandedGoal, mExpanded);
}
//If the item clicked is not expanded, then expand it
if (mExpandedGoal != holder){
mExpandedGoal = holder;
expand(holder, position);
}
else{
mExpandedGoal = null;
}
}
else{
//Collapse if expanded, expand if collapsed
if (mExpandedGoals[position]){
collapse(holder, mExpanded);
}
else{
expand(holder, position);
}
mExpandedGoals[position] = !mExpandedGoals[position];
}
}
/**
* Handles click events on items other than goals.
*
* @param holder the holder hosting the clicked item.
* @param view the clicked view.
*/
public void onPriorityItemClick(ViewHolder holder, View view){
//Determine the type of item and act accordingly
mClickedHolder = holder;
ItemHierarchy itemHierarchy = ((PriorityItemView)view).getItemHierarchy();
if (itemHierarchy.mAction != null){
mListener.onActionClick(itemHierarchy.mCategory, itemHierarchy.mGoal,
itemHierarchy.mBehavior, itemHierarchy.mAction);
}
else if (itemHierarchy.mBehavior != null){
mListener.onBehaviorClick(itemHierarchy.mCategory, itemHierarchy.mGoal,
itemHierarchy.mBehavior);
}
else{
mListener.onAddBehaviorsClick(itemHierarchy.mCategory, itemHierarchy.mGoal);
}
}
/**
* Update the data at the holder containing the last clicked item.
*/
public void updateData(){
if (mClickedHolder != null){
recycle(mClickedHolder);
populate(mClickedHolder, mClickedHolder.getAdapterPosition());
}
}
/**
* The item view holder. Also contains a pool of resources.
*
* @author Ismael Alonso
* @version 1.0.0
*/
static class ViewHolder extends RecyclerView.ViewHolder implements View.OnClickListener{
        //This is a pool of PriorityItemViews to be reused. Any unused PriorityItemViews should be
        // placed here. Before creating new ones, the list should be checked to see if there are
        // any of them available.
private static LinkedList<PriorityItemView> viewPool;
private MyPrioritiesGoalAdapter mListener;
//Components
private TextView name;
private LinearLayout offspring;
/**
* Constructor.
*
* @param itemView the root view.
* @param listener the listener.
*/
public ViewHolder(View itemView, MyPrioritiesGoalAdapter listener){
super(itemView);
mListener = listener;
name = (TextView)itemView.findViewById(R.id.my_priorities_goal_name);
name.setOnClickListener(this);
offspring = (LinearLayout)itemView.findViewById(R.id.my_priorities_goal_offspring);
}
/**
* Recycles a priority item view.
*
* @param view the view to be recycled.
*/
public static void recycleView(PriorityItemView view){
view.getImageView().setVisibility(View.VISIBLE);
view.setOnClickListener(null);
viewPool.add(view);
}
@Override
public void onClick(View view){
if (view instanceof TextView){
mListener.onItemClick(this, getAdapterPosition());
}
else if (view instanceof PriorityItemView){
mListener.onPriorityItemClick(this, view);
}
}
}
/**
* Data holder for the hierarchy of an item.
*
* @author Ismael Alonso
* @version 1.0.0
*/
public static class ItemHierarchy{
private Category mCategory;
private Goal mGoal;
private Behavior mBehavior;
private Action mAction;
/**
* Constructor.
*
* @param category the category.
* @param goal the goal.
* @param behavior the behavior.
* @param action the action.
*/
public ItemHierarchy(Category category, Goal goal, Behavior behavior, Action action){
mCategory = category;
mGoal = goal;
mBehavior = behavior;
mAction = action;
}
}
/**
* Item click listener interface.
*
* @author Ismael Alonso
* @version 1.0.0
*/
public interface OnItemClickListener{
/**
* Triggered when the add behaviors item is clicked.
*
* @param category the category containing the goal.
* @param goal the goal.
*/
void onAddBehaviorsClick(Category category, Goal goal);
/**
* Triggered when a behaviour is clicked.
*
* @param category the category containing the goal containing the behavior.
* @param goal the goal containing the behavior.
* @param behavior the behavior.
*/
void onBehaviorClick(Category category, Goal goal, Behavior behavior);
/**
* Triggered when an action is clicked.
*
* @param category the category containing the goal containing the behavior containing
* the action.
* @param goal the goal containing the behavior containing the action.
* @param behavior the behavior containing the action.
* @param action the action.
*/
void onActionClick(Category category, Goal goal, Behavior behavior, Action action);
}
}
| src/main/java/org/tndata/android/compass/adapter/MyPrioritiesGoalAdapter.java | package org.tndata.android.compass.adapter;
import android.content.Context;
import android.support.annotation.NonNull;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.Animation;
import android.view.animation.Transformation;
import android.widget.LinearLayout;
import android.widget.TextView;
import org.tndata.android.compass.R;
import org.tndata.android.compass.model.Action;
import org.tndata.android.compass.model.Behavior;
import org.tndata.android.compass.model.Category;
import org.tndata.android.compass.model.Goal;
import org.tndata.android.compass.model.Trigger;
import org.tndata.android.compass.ui.PriorityItemView;
import org.tndata.android.compass.util.ImageLoader;
import java.util.LinkedList;
/**
* Adapter for the goal list on my priorities. It handles all of its events and animations.
*
* @author Ismael Alonso
* @version 1.0.0
*/
public class MyPrioritiesGoalAdapter extends RecyclerView.Adapter{
//The expansion mode, if set to true, when a view expands the opened one (if any) collapses
private final boolean mSingleExpandedGoalMode;
//Context, category, and listener
private Context mContext;
private Category mCategory;
private OnItemClickListener mListener;
private ViewHolder mClickedHolder;
private boolean[] mExpandedGoals;
private ViewHolder mExpandedGoal;
private int mExpanded;
private int mCollapsing;
/**
* Constructor.
*
* @param context the application context.
* @param category the selected category.
* @param listener the receiver of tap events.
*/
public MyPrioritiesGoalAdapter(@NonNull Context context, @NonNull Category category,
@NonNull OnItemClickListener listener){
mSingleExpandedGoalMode = true;
mContext = context;
mCategory = category;
mListener = listener;
mClickedHolder = null;
mExpandedGoals = new boolean[mCategory.getGoals().size()];
for (int i = 0; i < mExpandedGoals.length; i++){
mExpandedGoals[i] = false;
}
mExpandedGoal = null;
mExpanded = -1;
mCollapsing = -1;
ViewHolder.viewPool = new LinkedList<>();
}
@Override
public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType){
LayoutInflater inflater = LayoutInflater.from(mContext);
View view = inflater.inflate(R.layout.item_my_priorities_goal, parent, false);
return new ViewHolder(view, this);
}
@Override
public void onBindViewHolder(RecyclerView.ViewHolder holder, int position){
((ViewHolder)holder).name.setText(mCategory.getGoals().get(position).getTitle());
}
@Override
public long getItemId(int position){
return position;
}
@Override
public int getItemCount(){
return mCategory.getGoals().size();
}
/**
* Populates the offspring of a goal with behaviors, actions, and triggers.
*
* @param holder the view holder hosting the goal.
* @param position the position of the goal in the backing list.
*/
private void populate(ViewHolder holder, int position){
Goal goal = mCategory.getGoals().get(position);
//For each behavior in the goal
for (Behavior behavior:goal.getBehaviors()){
//A priority item view is retrieved and populated
PriorityItemView behaviorView = getPriorityItemView();
behaviorView.setItemHierarchy(new ItemHierarchy(mCategory, goal, behavior, null));
behaviorView.setLeftPadding(20);
behaviorView.getTextView().setText(behavior.getTitle());
ImageLoader.loadBitmap(behaviorView.getImageView(), behavior.getIconUrl(), false);
behaviorView.setOnClickListener(holder);
//The view is added to the goal's offspring
holder.offspring.addView(behaviorView);
Log.d("BehaviourActions", behavior.getActions().size() + "");
//For each action in the behavior
for (Action action:behavior.getActions()){
PriorityItemView actionView = getPriorityItemView();
actionView.setItemHierarchy(new ItemHierarchy(mCategory, goal, behavior, action));
actionView.setLeftPadding(40);
actionView.getTextView().setText(action.getTitle());
ImageLoader.loadBitmap(actionView.getImageView(), action.getIconUrl(), false);
actionView.setOnClickListener(holder);
holder.offspring.addView(actionView);
Trigger trigger = action.getCustomTrigger();
if (trigger != null){
PriorityItemView triggerView = getPriorityItemView();
triggerView.setItemHierarchy(new ItemHierarchy(mCategory, goal, behavior, action));
triggerView.setLeftPadding(65);
String triggerText = trigger.getRecurrencesDisplay();
String date = trigger.getFormattedDate();
if (!date.equals("")){
triggerText += " " + date;
}
triggerText += " " + trigger.getFormattedTime();
if (!triggerText.equals("")){
triggerView.getTextView().setText(triggerText);
triggerView.getImageView().setVisibility(View.GONE);
triggerView.setOnClickListener(holder);
holder.offspring.addView(triggerView);
}
else{
ViewHolder.recycleView(triggerView);
}
}
}
}
//Add behaviours view
PriorityItemView addBehaviors = getPriorityItemView();
addBehaviors.setItemHierarchy(new ItemHierarchy(mCategory, goal, null, null));
addBehaviors.setLeftPadding(0);
addBehaviors.getTextView().setText("Add behaviors");
addBehaviors.getImageView().setVisibility(View.GONE);
addBehaviors.setOnClickListener(holder);
holder.offspring.addView(addBehaviors);
}
/**
* Recycles the offspring of a goal.
*
* @param holder the view holder hosting the goal.
*/
private void recycle(ViewHolder holder){
//Add all the views to the recycled queue and clear the offspring.
for (int i = 0; i < holder.offspring.getChildCount(); i++){
ViewHolder.recycleView((PriorityItemView)holder.offspring.getChildAt(i));
}
holder.offspring.removeAllViews();
Log.d("MyPriorities", ViewHolder.viewPool.size() + " views in the recycled queue");
}
/**
* Expands a goal.
*
* @param holder the view holder hosting the goal.
* @param position the position of the goal in the backing array.
*/
private void expand(ViewHolder holder, int position){
//The position is marked as expanded
mExpanded = position;
//Populate only if the view is not collapsing. Collapsing does not recycle until
// it is done, making it necessary to do this check.
if (mCollapsing != position){
populate(holder, position);
}
holder.offspring.setVisibility(View.VISIBLE);
final View view = holder.offspring;
view.measure(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.WRAP_CONTENT);
final int targetHeight = view.getMeasuredHeight();
view.getLayoutParams().height = 0;
view.setVisibility(View.VISIBLE);
Animation animation = new Animation(){
@Override
protected void applyTransformation(float interpolatedTime, Transformation t){
view.getLayoutParams().height = (interpolatedTime == 1)
? LinearLayout.LayoutParams.WRAP_CONTENT
: (int)(targetHeight * interpolatedTime);
view.requestLayout();
}
@Override
public boolean willChangeBounds(){
return true;
}
};
//1dp/ms
int length = (int)(targetHeight/view.getContext().getResources().getDisplayMetrics().density);
animation.setDuration(length);
view.startAnimation(animation);
}
/**
* Returns a free PriorityItemView.
*
* @return a PriorityItemView ready to be populated.
*/
private PriorityItemView getPriorityItemView(){
//If the recycle queue is empty it creates a new one, otherwise, returns the first one
if (ViewHolder.viewPool.isEmpty()){
return new PriorityItemView(mContext);
}
else{
return ViewHolder.viewPool.removeFirst();
}
}
/**
* Collapses a goal.
*
* @param holder the view holder hosting the goal.
* @param position the position of the goal in the backing array.
*/
private void collapse(final ViewHolder holder, int position){
mCollapsing = position;
final ViewGroup view = holder.offspring;
final int initialHeight = view.getMeasuredHeight();
Animation animation = new Animation(){
@Override
protected void applyTransformation(float interpolatedTime, Transformation t){
if(interpolatedTime == 1){
if (mCollapsing != -1){
mCollapsing = -1;
view.setVisibility(View.GONE);
recycle(holder);
}
}
else{
view.getLayoutParams().height = initialHeight-(int)(initialHeight*interpolatedTime);
view.requestLayout();
}
}
@Override
public boolean willChangeBounds(){
return true;
}
};
//1dp/ms
int length = (int)(initialHeight/view.getContext().getResources().getDisplayMetrics().density);
animation.setDuration(length);
view.startAnimation(animation);
}
/**
* Handles click events on goals.
*
* @param holder the holder hosting the clicked goal.
* @param position the position of the clicked goal in the backing array.
*/
public void onItemClick(ViewHolder holder, int position){
if (mSingleExpandedGoalMode){
//If there is an expanded goal, collapse it
if (mExpandedGoal != null){
collapse(mExpandedGoal, mExpanded);
}
//If the item clicked is not expanded, then expand it
if (mExpandedGoal != holder){
mExpandedGoal = holder;
expand(holder, position);
}
else{
mExpandedGoal = null;
}
}
else{
//Collapse if expanded, expand if collapsed
if (mExpandedGoals[position]){
collapse(holder, mExpanded);
}
else{
expand(holder, position);
}
mExpandedGoals[position] = !mExpandedGoals[position];
}
}
/**
* Handles click events on items other than goals.
*
* @param holder the holder hosting the clicked item.
* @param view the clicked view.
*/
public void onPriorityItemClick(ViewHolder holder, View view){
//Determine the type of item and act accordingly
mClickedHolder = holder;
ItemHierarchy itemHierarchy = ((PriorityItemView)view).getItemHierarchy();
if (itemHierarchy.mAction != null){
mListener.onActionClick(itemHierarchy.mCategory, itemHierarchy.mGoal,
itemHierarchy.mBehavior, itemHierarchy.mAction);
}
else if (itemHierarchy.mBehavior != null){
mListener.onBehaviorClick(itemHierarchy.mCategory, itemHierarchy.mGoal,
itemHierarchy.mBehavior);
}
else{
mListener.onAddBehaviorsClick(itemHierarchy.mCategory, itemHierarchy.mGoal);
}
}
/**
* Update the data at the holder containing the last clicked item.
*/
public void updateData(){
if (mClickedHolder != null){
recycle(mClickedHolder);
populate(mClickedHolder, mClickedHolder.getAdapterPosition());
}
}
/**
* The item view holder. Also contains a pool of resources.
*
* @author Ismael Alonso
* @version 1.0.0
*/
static class ViewHolder extends RecyclerView.ViewHolder implements View.OnClickListener{
//This is a pool of TextViews to be reused. Any unused TextViews should be placed
// here. Before creating new ones, the list should be checked to see if there are
// any of them available.
private static LinkedList<PriorityItemView> viewPool;
private MyPrioritiesGoalAdapter mListener;
//Components
private TextView name;
private LinearLayout offspring;
/**
* Constructor.
*
* @param itemView the root view.
* @param listener the listener.
*/
public ViewHolder(View itemView, MyPrioritiesGoalAdapter listener){
super(itemView);
mListener = listener;
name = (TextView)itemView.findViewById(R.id.my_priorities_goal_name);
name.setOnClickListener(this);
offspring = (LinearLayout)itemView.findViewById(R.id.my_priorities_goal_offspring);
}
/**
* Recycles a priority item view.
*
* @param view the view to be recycled.
*/
public static void recycleView(PriorityItemView view){
view.getImageView().setVisibility(View.VISIBLE);
view.setOnClickListener(null);
viewPool.add(view);
}
@Override
public void onClick(View view){
if (view instanceof TextView){
mListener.onItemClick(this, getAdapterPosition());
}
else if (view instanceof PriorityItemView){
mListener.onPriorityItemClick(this, view);
}
}
}
/**
* Data holder for the hierarchy of an item.
*
* @author Ismael Alonso
* @version 1.0.0
*/
public static class ItemHierarchy{
private Category mCategory;
private Goal mGoal;
private Behavior mBehavior;
private Action mAction;
/**
* Constructor.
*
* @param category the category.
* @param goal the goal.
* @param behavior the behavior.
* @param action the action.
*/
public ItemHierarchy(Category category, Goal goal, Behavior behavior, Action action){
mCategory = category;
mGoal = goal;
mBehavior = behavior;
mAction = action;
}
}
/**
* Item click listener interface.
*
* @author Ismael Alonso
* @version 1.0.0
*/
public interface OnItemClickListener{
/**
* Triggered when the add behaviors item is clicked.
*
* @param category the category containing the goal.
* @param goal the goal.
*/
void onAddBehaviorsClick(Category category, Goal goal);
/**
* Triggered when a behaviour is clicked.
*
* @param category the category containing the goal containing the behavior.
* @param goal the goal containing the behavior.
* @param behavior the behavior.
*/
void onBehaviorClick(Category category, Goal goal, Behavior behavior);
/**
* Triggered when an action is clicked.
*
* @param category the category containing the goal containing the behavior containing
* the action.
* @param goal the goal containing the behavior containing the action.
* @param behavior the behavior containing the action.
* @param action the action.
*/
void onActionClick(Category category, Goal goal, Behavior behavior, Action action);
}
}
| Bugfix: items with null URLs do not call the ImageLoader
| src/main/java/org/tndata/android/compass/adapter/MyPrioritiesGoalAdapter.java | Bugfix: items with null URLs do not call the ImageLoader | <ide><path>rc/main/java/org/tndata/android/compass/adapter/MyPrioritiesGoalAdapter.java
<ide> behaviorView.setItemHierarchy(new ItemHierarchy(mCategory, goal, behavior, null));
<ide> behaviorView.setLeftPadding(20);
<ide> behaviorView.getTextView().setText(behavior.getTitle());
<del> ImageLoader.loadBitmap(behaviorView.getImageView(), behavior.getIconUrl(), false);
<add> if (behavior.getIconUrl() != null){
<add> ImageLoader.loadBitmap(behaviorView.getImageView(), behavior.getIconUrl(), false);
<add> }
<ide> behaviorView.setOnClickListener(holder);
<ide>
<ide> //The view is added to the goal's offspring
<ide> actionView.setItemHierarchy(new ItemHierarchy(mCategory, goal, behavior, action));
<ide> actionView.setLeftPadding(40);
<ide> actionView.getTextView().setText(action.getTitle());
<del> ImageLoader.loadBitmap(actionView.getImageView(), action.getIconUrl(), false);
<add> if (action.getIconUrl() != null){
<add> ImageLoader.loadBitmap(actionView.getImageView(), action.getIconUrl(), false);
<add> }
<ide> actionView.setOnClickListener(holder);
<ide> holder.offspring.addView(actionView);
<ide>
<ide> if (!date.equals("")){
<ide> triggerText += " " + date;
<ide> }
<del> triggerText += " " + trigger.getFormattedTime();
<add> String triggerDate = trigger.getFormattedTime();
<add> if (!triggerDate.equals("")){
<add> triggerText += " " + triggerDate;
<add> }
<ide> if (!triggerText.equals("")){
<ide> triggerView.getTextView().setText(triggerText);
<ide> triggerView.getImageView().setVisibility(View.GONE); |
|
Java | apache-2.0 | 13f439a19b219a0b0cb8fc40735a39d32bd71ca5 | 0 | mtransitapps/ca-edmonton-ets-bus-parser | package org.mtransit.parser.ca_edmonton_ets_bus;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
import org.mtransit.parser.CleanUtils;
import org.mtransit.parser.DefaultAgencyTools;
import org.mtransit.parser.Pair;
import org.mtransit.parser.SplitUtils;
import org.mtransit.parser.SplitUtils.RouteTripSpec;
import org.mtransit.parser.Utils;
import org.mtransit.parser.gtfs.data.GCalendar;
import org.mtransit.parser.gtfs.data.GCalendarDate;
import org.mtransit.parser.gtfs.data.GRoute;
import org.mtransit.parser.gtfs.data.GSpec;
import org.mtransit.parser.gtfs.data.GStop;
import org.mtransit.parser.gtfs.data.GTrip;
import org.mtransit.parser.gtfs.data.GTripStop;
import org.mtransit.parser.mt.data.MAgency;
import org.mtransit.parser.mt.data.MDirectionType;
import org.mtransit.parser.mt.data.MRoute;
import org.mtransit.parser.mt.data.MTrip;
import org.mtransit.parser.mt.data.MTripStop;
// https://data.edmonton.ca/
// https://data.edmonton.ca/Transit/ETS-Bus-Schedule-GTFS-Data-Feed-zipped-files/gzhc-5ss6
// https://data.edmonton.ca/download/gzhc-5ss6/application/zip
// http://www.edmonton.ca/ets/ets-data-for-developers.aspx
public class EdmontonETSBusAgencyTools extends DefaultAgencyTools {
public static void main(String[] args) {
if (args == null || args.length == 0) {
args = new String[3];
args[0] = "input/gtfs.zip";
args[1] = "../../mtransitapps/ca-edmonton-ets-bus-android/res/raw/";
args[2] = ""; // files-prefix
}
new EdmontonETSBusAgencyTools().start(args);
}
private HashSet<String> serviceIds;
@Override
public void start(String[] args) {
System.out.printf("\nGenerating ETS bus data...");
long start = System.currentTimeMillis();
this.serviceIds = extractUsefulServiceIds(args, this, true);
super.start(args);
System.out.printf("\nGenerating ETS bus data... DONE in %s.\n", Utils.getPrettyDuration(System.currentTimeMillis() - start));
}
@Override
public boolean excludeCalendar(GCalendar gCalendar) {
if (this.serviceIds != null) {
return excludeUselessCalendar(gCalendar, this.serviceIds);
}
return super.excludeCalendar(gCalendar);
}
@Override
public boolean excludeCalendarDate(GCalendarDate gCalendarDates) {
if (this.serviceIds != null) {
return excludeUselessCalendarDate(gCalendarDates, this.serviceIds);
}
return super.excludeCalendarDate(gCalendarDates);
}
@Override
public boolean excludeRoute(GRoute gRoute) {
return super.excludeRoute(gRoute);
}
@Override
public boolean excludeTrip(GTrip gTrip) {
if (this.serviceIds != null) {
return excludeUselessTrip(gTrip, this.serviceIds);
}
return super.excludeTrip(gTrip);
}
@Override
public Integer getAgencyRouteType() {
return MAgency.ROUTE_TYPE_BUS;
}
@Override
public long getRouteId(GRoute gRoute) {
return Long.parseLong(gRoute.getRouteId()); // using route short name as route ID
}
private static final String DASH = " - ";
private static final String SLASH = " / ";
private static final String FORT = "Fort";
private static final String _AVE = " Ave";
private static final String _ST = " St";
private static final String TRANSIT_CENTER_SHORT = "TC";
private static final String EDM = "Edm";
private static final String EDM_GARRISON = EDM + " Garrison";
private static final String WEST_EDM_MALL = "WEM"; // "West " + EDM + " Mall";
private static final String WEST_EDM_MALL_TC = "WEM"; // "WEM TC"
private static final String LEWIS_FARMS = "Lewis Farms";
private static final String LEWIS_FARMS_TC = "Lewis Farms"; // "Lewis Farms TC"
private static final String CAPILANO = "Capilano"; //
private static final String CAPILANO_TC = "Capilano"; // "Capilano TC"
private static final String CLAREVIEW = "Clareview";
private static final String CLAREVIEW_EAST_TC = "Clareview"; // "East Clareview TC"
private static final String CLAREVIEW_WEST_TC = "Clareview"; // "West Clareview TC"
private static final String CROMDALE = "Cromdale";
private static final String JASPER_PLACE = "Jasper Pl";
private static final String CONCORDIA = "Concordia";
private static final String COLISEUM = "Coliseum";
private static final String COLISEUM_TC = COLISEUM; // "Coliseum TC";
private static final String WESTMOUNT = "Westmount";
private static final String WESTMOUNT_TC = WESTMOUNT; // "Westmount TC"
private static final String UNIVERSITY = "University";
private static final String UNIVERSITY_TC = UNIVERSITY; // "University TC";
private static final String MILL_WOODS = "Mill Woods";
private static final String MILL_WOODS_ = "Mill Woods TC";
private static final String MILL_WOODS_TC = "Mill Woods TC";
private static final String DAN_KNOTT = "Dan Knott";
private static final String NAIT = "NAIT";
private static final String SOUTHGATE = "Southgate";
private static final String SOUTHGATE_TC = "Southgate"; // "Southgate TC"
private static final String NORTHGATE = "Northgate";
private static final String NORTHGATE_TC = "Northgate"; // "Northgate TC"
private static final String ABBOTTSFIELD = "Abbottsfield";
private static final String AMISKWACIY = "amiskwaciy";
private static final String EAUX_CLAIRES = "Eaux Claires";
private static final String DOWNTOWN = "Downtown";
private static final String MILLGATE = "Millgate";
private static final String MILLGATE_TC = "Millgate"; // "Millgate TC"
private static final String GOV_CTR = "Gov Ctr";
private static final String MAC_EWAN = "MacEwan";
private static final String MAC_EWAN_GOV_CTR = MAC_EWAN + SLASH + GOV_CTR;
private static final String CASTLE_DOWNS = "Castle Downs";
private static final String CASTLE_DOWNS_TC = "Castle Downs"; // "Castle Downs TC"
private static final String CENTURY_PK = "Century Pk";
private static final String CENTURY_PK_TC = CENTURY_PK; // "Century Pk TC";
private static final String YELLOWBIRD = "Yellowbird";
private static final String SOUTH_CAMPUS = "South Campus";
private static final String SOUTH_CAMPUS_TC = SOUTH_CAMPUS; // "South Campus TC";
private static final String FT_EDM = FORT + " " + EDM;
private static final String LEGER = "Leger";
private static final String LEGER_TC = LEGER; // "Leger TC"
private static final String BRANDER_GDNS = "Brander Gdns";
private static final String MEADOWS = "Mdws"; // "Meadows";
private static final String BLACKMUD_CRK = "Blackmud Crk";
private static final String BLACKBURNE = "Blackburne";
private static final String ALLARD = "Allard";
private static final String HARRY_AINLAY = "Harry Ainlay";
private static final String TWIN_BROOKS = "Twin Brooks";
private static final String RUTHERFORD = "Rutherford";
private static final String SOUTHWOOD = "Southwood";
private static final String SOUTH_EDM_COMMON = "South " + EDM + " Common";
private static final String PARKALLEN = "Parkallen";
private static final String KNOTTWOOD = "Knottwood";
private static final String BELVEDERE = "Belvedere";
private static final String BELVEDERE_TC = "Belvedere"; // "Belvedere TC"
private static final String BONNIE_DOON = "Bonnie Doon";
private static final String LAUREL = "Laurel";
private static final String PLYPOW = "Plypow";
private static final String TAMARACK = "Tamarack";
private static final String BRECKENRIDGE_GRNS = "Breckenridge Grns";
private static final String WESTRIDGE = "Westridge";
private static final String LESSARD = "Lessard";
private static final String CAMERON_HTS = "Cameron Hts";
private static final String LYMBURN = "Lymburn";
private static final String ARCH_MAC = "Arch Mac"; // Donald
private static final String ROSS_SHEPPARD = "Ross Shep"; // "Ross Sheppard";
private static final String ORMSBY_PL = "Ormsby Pl";
private static final String LYMBURN_ORMSBY_PL = LYMBURN + SLASH + ORMSBY_PL;
private static final String BERIAULT = "Beriault";
private static final String CRESTWOOD = "Crestwood";
private static final String ST_FRANCIS_XAVIER = "St Francis Xavier";
private static final String LA_PERLE = "LaPerle";
private static final String LA_ZERTE = "LaZerte";
private static final String MARY_BUTTERWORTH = "Mary Butterworth";
private static final String HILLCREST = "Hillcrest";
private static final String CARLTON = "Carlton";
private static final String WEDGEWOOD = "Wedgewood";
private static final String THE_GRANGE = "The Grange";
private static final String RIO_TERRACE = "Rio Ter";
private static final String THE_HAMPTONS = "The Hamptons";
private static final String WESTVIEW_VLG = "Westview Vlg";
private static final String MISTATIM_IND = "Mistatim Ind";
private static final String STADIUM = "Stadium";
private static final String STADIUM_TC = "Stadium"; // "Stadium TC"
private static final String LAGO_LINDO = "Lago Lindo";
private static final String MONTROSE = "Montrose";
private static final String KINGSWAY = "Kingsway";
private static final String KING_EDWARD_PK = "King Edward Pk";
private static final String RAPPERSWILL = "Rapperswill";
private static final String OXFORD = "Oxford";
private static final String _34_ST_35A_AVE = "34" + _ST + SLASH + "35A" + _AVE;
private static final String _82_ST = "82" + _ST;
private static final String _82_ST_132_AVE = "82" + _ST + SLASH + "132" + _AVE;
private static final String _84_ST_105_AVE = "84" + _ST + SLASH + "105" + _AVE;
private static final String _84_ST_111_AVE = "84" + _ST + SLASH + " 111" + _AVE;
private static final String _85_ST_132_AVE = "85" + _ST + DASH + "132" + _AVE;
private static final String _88_ST_132_AVE = "88" + _ST + SLASH + "132" + _AVE;
private static final String _95_ST_132_AVE = "95" + _ST + SLASH + "132" + _AVE;
private static final String _127_ST_129_AVE = "127" + _ST + SLASH + "129" + _AVE;
private static final String _142_ST_109_AVE = "142" + _ST + SLASH + "109" + _AVE;
private static final String WHITEMUD_DR_53_AVE = "Whitemud Dr" + SLASH + "53 " + _AVE;
private static final String JOSEPH_MC_NEIL = "Joseph McNeil";
private static final String CANOSSA = "Canossa";
private static final String CHAMBERY = "Chambery";
private static final String KERNOHAN = "Kernohan";
private static final String LONDONDERRY = "Londonderry";
private static final String EVERGREEN = "Evergreen";
private static final String FRASER = "Fraser";
private static final String FT_SASKATCHEWAN = FORT + " Saskatchewan";
private static final String SPRUCE_GRV = "Spruce Grv";
private static final String MC_CONACHIE = "McConachie";
private static final String SCHONSEE = "Schonsee";
private static final String BRINTNELL = "Brintnell";
private static final String KLARVATTEN = "Klarvatten";
private static final String RIVERDALE = "Riverdale";
private static final String GOLD_BAR = "Gold Bar";
private static final String JASPER_GATES = "Jasper Gts";
private static final String SOUTHPARK = "Southpark";
private static final String NORTHLANDS = "Northlands";
private static final String HAWKS_RDG = "Hawks Rdg";
private static final String WINTERBURN = "Winterburn";
private static final String WINTERBURN_IND = WINTERBURN + " Ind";
private static final String HOLYROOD = "Holyrood";
private static final String STRATHCONA = "Strathcona";
private static final String STRATHCONA_IND = STRATHCONA + " Ind";
private static final String WINDSOR_PARK = "Windsor Pk";
private static final String RITCHIE = "Ritchie";
private static final String AMBLESIDE = "Ambleside";
private static final String WINDERMERE = "Windermere";
private static final String BELGRAVIA = "Belgravia";
private static final String ROSENTHAL = "Rosenthal";
private static final String CHAPPELLE = "Chappelle";
private static final String ORCHARDS = "Orchards";
private static final String QUARRY_RDG = "Quarry Rdg";
private static final String HOLLICK_KENYON = "Hollick Kenyon";
private static final String MC_LEOD = "McLeod";
private static final String EDM_WASTE_MGT_CTR = EDM + " Waste Mgt Ctr";
private static final String VLY_ZOO = "Vly Zoo";
private static final String VLY_ZOO_FT_EDM = VLY_ZOO + SLASH + FT_EDM;
private static final String EDM_INT_AIRPORT = "Edm Int Airport";
private static final String GRIESBACH = "Griesbach";
private static final String REMAND_CTR = "Remand Ctr";
private static final String ARCH_O_LEARY = "Arch O'Leary";
private static final String OTTEWELL = "Ottewell";
private static final String AOB = "AOB";
private static final String OTTEWELL_AOB = OTTEWELL + SLASH + AOB;
private static final String BURNEWOOD = "Burnewood";
private static final String MC_PHERSON = "McPherson";
private static final String ST_ROSE = "St Rose";
private static final String OSCAR_ROMERO = "Oscar Romero";
private static final String BRAEMAR = "Braemar";
private static final String PARKVIEW = "Parkview";
private static final String QUEEN_ELIZABETH = "Queen Elizabeth";
private static final String HADDOW = "Haddow";
private static final String FR_TROY = "Fr Troy";
private static final String JACKSON_HTS = "Jackson Hts";
private static final String BATURYN = "Baturyn";
private static final String EASTGLEN = "Eastglen";
private static final String MINCHAU = "Minchau";
private static final String HOLY_FAMILY = "Holy Family";
private static final String MC_NALLY = "McNally";
private static final String SILVERBERRY = "SilverBerry";
private static final String VICTORIA = "Victoria";
private static final String MEADOWLARK = "Meadowlark";
private static final String WESTLAWN = "Westlawn";
private static final String BELMEAD = "Belmead";
private static final String MATT_BERRY = "Matt Berry";
private static final String JJ_BOWLEN = "JJ Bowlen";
private static final String CARDINAL_LEGER = "Cardinal Leger";
private static final String DUNLUCE = "Dunluce";
private static final String BEAUMARIS = "Beaumaris";
private static final String ELSINORE = "Elsinore";
private static final String RIVERBEND = "Riverbend";
private static final String BEARSPAW = "Bearspaw";
private static final String AVALON = "Avalon";
private static final String WILDROSE = "Wildrose";
private static final String GREENVIEW = "Greenview";
private static final String KENILWORTH = "Kenilworth";
private static final String HARDISTY = "Hardisty";
private static final String CRAWFORD_PLAINS = "Crawford Plains";
private static final String RHATIGAN_RIDGE = "Rhatigan Rdg";
private static final String AVONMORE = "Avonmore";
private static final String LARKSPUR = "Larkspur";
private static final String MAYLIEWAN = "Mayliewan";
private static final String WP_WAGNER = "WP Wagner";
private static final String BROOKSIDE = "Brookside";
private static final String MAGRATH = "Magrath";
private static final String LY_CAIRNS = "LY Cairns";
private static final String BRUCE_SMITH = "Bruce Smith";
private static final String JH_PICARD = "JH Picard";
private static final String TD_BAKER = "TD Baker";
private static final String ST_KEVIN = "St Kevin";
private static final String LAKEWOOD = "Lakewood";
private static final String WOODVALE = "Woodvale";
private static final String VERNON_BARFORD = "Vernon Barford";
private static final String BELLE_RIVE = "Belle Rive";
private static final String LENDRUM = "Lendrum";
@Override
public String getRouteLongName(GRoute gRoute) {
String gRouteLongName = gRoute.getRouteLongName();
gRouteLongName = CleanUtils.cleanStreetTypes(gRouteLongName);
return CleanUtils.cleanLabel(gRouteLongName);
}
@Override
public String getRouteShortName(GRoute gRoute) {
return super.getRouteShortName(gRoute); // do not change, used by real-time API
}
private static final String AGENCY_COLOR_BLUE = "2D3092"; // BLUE (from Wikipedia SVG)
private static final String AGENCY_COLOR = AGENCY_COLOR_BLUE;
@Override
public String getAgencyColor() {
return AGENCY_COLOR;
}
@Override
public int compareEarly(long routeId, List<MTripStop> list1, List<MTripStop> list2, MTripStop ts1, MTripStop ts2, GStop ts1GStop, GStop ts2GStop) {
if (ALL_ROUTE_TRIPS2.containsKey(routeId)) {
return ALL_ROUTE_TRIPS2.get(routeId).compare(routeId, list1, list2, ts1, ts2, ts1GStop, ts2GStop);
}
return super.compareEarly(routeId, list1, list2, ts1, ts2, ts1GStop, ts2GStop);
}
@Override
public int compare(long routeId, List<MTripStop> list1, List<MTripStop> list2, MTripStop ts1, MTripStop ts2, GStop ts1GStop, GStop ts2GStop) {
return super.compare(routeId, list1, list2, ts1, ts2, ts1GStop, ts2GStop);
}
@Override
public void setTripHeadsign(MRoute mRoute, MTrip mTrip, GTrip gTrip, GSpec gtfs) {
if (ALL_ROUTE_TRIPS2.containsKey(mRoute.getId())) {
return; // split
}
String tripHeadsign = gTrip.getTripHeadsign();
if ("1".equals(tripHeadsign)) {
tripHeadsign = null;
}
if (StringUtils.isEmpty(tripHeadsign)) {
System.out.printf("\nUnexpected trip to split %s\n", gTrip);
System.exit(-1);
}
mTrip.setHeadsignString(tripHeadsign, gTrip.getDirectionId()); // cleanTripHeadsign() currently used for stop head sign
}
@Override
public boolean mergeHeadsign(MTrip mTrip, MTrip mTripToMerge) {
System.out.printf("\nUnexpected trips to merge: %s & %s!\n", mTrip, mTripToMerge);
System.exit(-1);
return false;
}
@Override
public ArrayList<MTrip> splitTrip(MRoute mRoute, GTrip gTrip, GSpec gtfs) {
if (ALL_ROUTE_TRIPS2.containsKey(mRoute.getId())) {
return ALL_ROUTE_TRIPS2.get(mRoute.getId()).getAllTrips();
}
return super.splitTrip(mRoute, gTrip, gtfs);
}
private static HashMap<Long, RouteTripSpec> ALL_ROUTE_TRIPS2;
static {
HashMap<Long, RouteTripSpec> map2 = new HashMap<Long, RouteTripSpec>();
map2.put(1l, new RouteTripSpec(1l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"5009", // West Edmonton Mall Transit Centre
"5302", // Meadowlark Transit Centre
"5110", // Jasper Place Transit Centre
"5169", // == 142 Street & Stony Plain Road
"5432", "1047", // !=
"5440", "1917", // !=
"1242", // == 124 Street & 102 Avenue
"1322", // == 103 Street & Jasper Avenue
"1336", // != 101 Street & Jasper Avenue
"1346", // != 101 Street & 101A Avenue
"1346", // 101 Street & 101A Avenue
"2591", // 79 Street & 106 Avenue
"2301" // Capilano Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"2301", // Capilano Transit Centre
"2267", // 79 Street & 106 Avenue
"1620", // 101 Street & Jasper Avenue
"1746", // == 122 Street & 102 Avenue
"1971", "5087", // !=
"1828", "5564", // !=
"5157", // == 140 Street & Stony Plain Road
"5101", // Jasper Place Transit Centre
"5301", // Meadowlark Transit Centre
"5009" // West Edmonton Mall Transit Centre
})) //
.compileBothTripSort());
map2.put(2l, new RouteTripSpec(2l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LESSARD) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5723", "5008", "5437", "1336",
/* + */"1256"/* + */, //
"1408", "1561", "1454", "7902" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7902", "1561", "1407",
/* + */"1266"/* + */, //
"1620", "5185", "5003", "5723" })) //
.compileBothTripSort());
map2.put(3l, new RouteTripSpec(3l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CROMDALE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5106", "5928", "1279", "1360", //
"1243", // ==
"1142", // !=
"1336", // !=
"1256", "1147" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1147", "1346", "1775", "1846", "1669", "5389", "5106" })) //
.compileBothTripSort());
map2.put(4l, new RouteTripSpec(4l, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"8601", // Lewis Farms Transit Centre
"5006", // West Edmonton Mall Transit Centre
"2702", // South Campus Transit Centre Fort Edmonton Park // LAST
"2714", // South Campus Transit Centre Fort Edmonton Park // CONTINUE
"2748", // ==
"22354", // != <>
"2982", // != <>
"2638", // == <>
"2625", // != <>
"2890", // == <> 114 Street & 89 Avenue
"2002", // University Transit Centre
"2065", // == 87 Street & 82 Avenue
"2593", // != 85 Street & 82 Avenue
"2196", // != 83 Street & 90 Avenue
"2952", // != 83 Street & 84 Avenue
"2159", // <> 83 Street & 82 Avenue // LAST
"2549", // 83 Street & 82 Avenue // LAST
"2447", // 83 Street & 82 Avenue // CONTINUE
"2222", // !=
"2372", // !=
"2306", // Capilano Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"2306", // Capilano Transit Centre
"2532", // !=
"2532", "2476", "2568", "2050", "2462", "2161", "2287", "2288", "2494", "2376", "2231", "2015", "2615", "2608", "2167", "2193", //
"2037", // !=
"2159", // <> 83 Street & 82 Avenue // CONTINUE
"2590", // !=
"2340", "2087", "2131", "2294", "2236", "2033", "2659", "2853", "2723", "2891", "2845", "2683", "2893", "2788", "2689", //
"2733", // !=
"2752", // ==
"22354", // != <>
"2982", // != <>
"2638", // == <>
"2625", // != <>
"2890", // == <> 114 Street & 89 Avenue
"2001", // != University Transit Centre
"2702", // South Campus Transit Centre Fort Edmonton Park
"5006", // West Edmonton Mall Transit Centre // LAST
"5003", // West Edmonton Mall Transit Centre // CONTINUE
"8601" // Lewis Farms Transit Centre
})) //
.compileBothTripSort());
map2.put(5l, new RouteTripSpec(5l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, COLISEUM, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5210", "1083", "1336", "1188",
/* + */"1051"/* + */, //
"1268", "1202" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1202", "1328", "1620", "5210" })) //
.compileBothTripSort());
map2.put(6l, new RouteTripSpec(6l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"2203", // Southgate Transit Centre
"2211", // Southgate Transit Centre
"2085", // ++
"2024", // ++
"2109", // Millgate Transit Centre
"2102", // Millgate Transit Centre
"3281", // ++
"3121", // ++
"3215", // Mill Woods Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"3215", // Mill Woods Transit Centre
"3127", // ++
"3347", // ++
"2109", // Millgate Transit Centre
"2273", // ++
"2179", // ++
"2211", // Southgate Transit Centre
"2203", // Southgate Transit Centre
})) //
.compileBothTripSort());
map2.put(7l, new RouteTripSpec(7l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"5108", // Jasper Place Transit Centre
"1881", // 124 Street & 107 Avenue
"1829", // 105 Street & 105 Avenue
"1542", // ++
"2659", // ++
"2891", // ++
"2890", // 114 Street & 89 Avenue
"2002", // University Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"2002", // University Transit Centre
"2860", // ++
"2824", // ++
"1457", // ++
"1989", // 108 Street & 104 Avenue
"1808", // ++
"5108", // Jasper Place Transit Centre
})) //
.compileBothTripSort());
map2.put(8l, new RouteTripSpec(8l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"3207", // Mill Woods Transit Centre
"3122", // ==
"3244", // !=
"3338", // !=
"3462", // !=
"3498", // !=
"3264", // ==
"2108", // Millgate Transit Centre
"1989", // 108 Street & 104 Avenue
"1106", // Kingsway RAH Transit
"1476", // 106 Street & 118 Avenue
"1201", // Coliseum Transit Centre
"1001", // Abbottsfield Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"1001", // Abbottsfield Transit Centre
"1208", // Coliseum Transit Centre
"1112", // Kingsway RAH Transit Centre
"1557", // 109 Street & 105 Avenue
"2103", // Millgate Transit Centre
"3599", // ==
"3676", // !=
"3360", // !=
"3394", // !=
"3121", // ==
"3207", // Mill Woods Transit Centre
})) //
.compileBothTripSort());
map2.put(9l, new RouteTripSpec(9l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) // SOUTHGATE
.addTripSort(MDirectionType.NORTH.intValue(), // CENTURY_PK / SOUTHGATE => EAUX_CLAIRES
Arrays.asList(new String[] { //
"4216", // Century Park Transit Centre
"2218", // == Southgate Transit Centre
"2623", // ==
"2658", // !=
"2830", "2657", // !=
"2852", // ==
"1591", // 101 Street & MacDonald Drive
"1108", // 101 Street & MacDonald Drive
"1476", // 106 Street & 118 Avenue
"7016", // Northgate Transit Centre
"6317" // Eaux Claires Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), // EAUX_CLAIRES => CENTURY_PK / SOUTHGATE
Arrays.asList(new String[] { //
"6317", // Eaux Claires Transit Centre
"7001", // Northgate Transit Centre
"1532", // 106 Street & 118 Avenue Loop
"1142", // 101 Street & MacDonald Drive nearside
"2631",// ==
"2895", "2833", // !=
"-22352", // !=
"2773", // ==
"2639", // ==
"-22223", // !=
"2218", // == Southgate Transit Centre
"2206", // Southgate Transit Centre
"4216" // Century Park Transit Centre
})) //
.compileBothTripSort());
map2.put(10l, new RouteTripSpec(10l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW_EAST_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, COLISEUM_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"1203", // Coliseum Transit Centre
"7186", // 69 Street & 144 Avenue
"7209", // Belvedere Transit Centre
"7101", // East Clareview Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"7101", // East Clareview Transit Centre
"7884", // Victoria Trail & Hooke Road
"7201", // Belvedere Transit Centre
"7572", // 66 Street & 144 Avenue
"1203", // Coliseum Transit Centre
})) //
.compileBothTripSort());
map2.put(11l, new RouteTripSpec(11l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"7007", "7186", "7106" //
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"7106", "7572", "7008", "7496", "7007" //
})) //
.compileBothTripSort());
map2.put(12L, new RouteTripSpec(12L, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"1110", // Kingsway RAH Transit Centre
"7003", // Northgate Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"7003", // Northgate Transit Centre
"1110", // Kingsway RAH Transit Centre
})) //
.compileBothTripSort());
map2.put(13l, new RouteTripSpec(13l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"6005", // Castle Downs Transit Centre
"7011", // Northgate Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"7011", // Northgate Transit Centre
"6005", // Castle Downs Transit Centre
})) //
.compileBothTripSort());
map2.put(14l, new RouteTripSpec(14l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"5011", // West Edmonton Mall Transit Centre
"5024", // 180 Street & 98 Avenue
"5153", // == 159 Street & Stony Plain Road
"5112", // != 157 Street & Stony Plain Road nearside
"5103",// != Jasper Place Transit Centre
"5293", // != 143 Street & Stony Plain Road
"1999" // != 100 Street & 103A Avenue nearside
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"1123", // 99 Street & 103A Avenue nearside
"1812", // == 111 Street & Jasper Avenue Nearside
"1828", // != 124 Street & 102 Avenue
"1971", // != 124 Street & 102 Avenue
"5185", // == 142 Street & Stony Plain Road
"5103", // Jasper Place Transit Centre
"5855", // 182 Street & 97A Avenue
"5011" // West Edmonton Mall Transit Centre
})) //
.compileBothTripSort());
map2.put(15l, new RouteTripSpec(15l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"3208", // Mill Woods Transit Centre
"2117", // Millgate Transit Centre
"1457", // 100 Street & Jasper Avenue
"1989", // 108 Street & 104 Avenue
"1227", // ++
"1532", // ++
"1476", // 106 Street & 118 Avenue
"6317", // Eaux Claires Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"6317", // Eaux Claires Transit Centre
"1532", // 106 Street & 118 Avenue Loop
"1557", // 109 Street & 105 Avenue
"1542", // 100 Street & Jasper Avenue
"2118", // Millgate Transit Centre
"3208", // Mill Woods Transit Centre
})) //
.compileBothTripSort());
map2.put(16l, new RouteTripSpec(16l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, GOV_CTR) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "1310", "7011", "6314", "6075", "6576", "6009" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "6009", "6124", "6317",
/* + */"7011"/* + */, //
"7003", "1310" })) //
.compileBothTripSort());
map2.put(17l, new RouteTripSpec(17l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"4203", // Century Park Transit Centre
"2206", // Southgate Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"2206", // Southgate Transit Centre
"4203", // Century Park Transit Centre
})) //
.compileBothTripSort());
map2.put(23l, new RouteTripSpec(23l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"5001", // West Edmonton Mall Transit Centre
"4202", // Century Park Transit Centre
"3217", // Mill Woods Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"3217", // Mill Woods Transit Centre
"4211", // Century Park Transit Centre
"5001", // West Edmonton Mall Transit Centre
})) //
.compileBothTripSort());
map2.put(24l, new RouteTripSpec(24l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"4806", // Leger Transit Centre
"9093", // Anderson Crescent W Ent & Anderson Way SW
"9095", // ++
"9096", // ++
"9097", // ++
"9098", // ++
"9241", // ++
"9244", // ++
"9245", // ++
"9246", // ++
"9673", // ++
"9405", // ++
"9633", // ++
"9815", // !=
"9057", // ==
"9630", // == Rabbit Hill Road & Ellerslie Road
"9071", // !=
"9072", // !=
"9631", // ==
"4106", // !=
"4864", // ++
"4548", // ++
"4201", // Century Park Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"4201", // Century Park Transit Centre
"4456", // ++
"4105", // ++
"4790", // !=
"9057", // ==
"9630", // == Rabbit Hill Road & Ellerslie Road
"9071", // !=
"9072", // !=
"9631", // ==
"9635", // !=
"9634", // ++
"9770", // ++
"9092", // 170 Street & Anderson Way SW
"4806", // Leger Transit Centre
})) //
.compileBothTripSort());
map2.put(25l, new RouteTripSpec(25l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "4801",
/* + */"4938"/* + */, //
/* + */"9415"/* + */, //
/* + */"9486"/* + */, //
/* + */"9557"/* + */, //
/* + */"9176"/* + */, //
/* + */"9632"/* + */, //
/* + */"9713"/* + */, //
/* + */"9094"/* + */, //
/* + */"9446"/* + */, //
/* + */"4106"/* + */, //
"4212" //
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "4212",
/* + */"9324"/* + */, //
"9632",
/* + */"9409"/* + */, //
/* + */"9553"/* + */, //
/* + */"9555"/* + */, //
/* + */"9412"/* + */, //
/* + */"9486"/* + */, //
/* + */"9415"/* + */, //
/* + */"9486"/* + */, //
"9526", "4801" //
})) //
.compileBothTripSort());
map2.put(26l, new RouteTripSpec(26l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WINDERMERE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "9460",
/* + */"9632"/* + */, //
"4808" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4808",
/* + */"9710"/* + */,//
"9460" })) //
.compileBothTripSort());
map2.put(30l, new RouteTripSpec(30l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3217", "4211", "4811", "4597", "4153", "2704" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2704", "4021", "4494", "4811", "4803", "4202", "3217" })) //
.compileBothTripSort());
map2.put(31l, new RouteTripSpec(31l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"4813", // Leger Transit Centre
"4308", // Hodgson Boulevard & Hilliard Green
"4329", // Carter Crest Road West & Rabbit Hill Road
"2208", // Southgate Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"2208", // Southgate Transit Centre
"4439", // Terwillegar Drive & 40 Avenue
"4834", // Hodgson Boulevard & Hilliard Green
"4813", // Leger Transit Centre
})) //
.compileBothTripSort());
map2.put(32l, new RouteTripSpec(32l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BRANDER_GDNS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"4025", // 148 Street & Riverbend Road nearside
"4153", // Whitemud Drive NB & 53 Avenue
"2705", // South Campus Transit Centre Fort Edmonton Park
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"2705", // South Campus Transit Centre Fort Edmonton Park
"4021", // Whitemud Drive SB & 53 Avenue
"4025", // 148 Street & Riverbend Road nearside
})) //
.compileBothTripSort());
map2.put(33l, new RouteTripSpec(33l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MEADOWS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"5001", "4021", "4040", "2973", "2205", "2215", "2118", "3713" //
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"3713", "2117", "2205", "2984", "4021", "4153", "5001" //
})) //
.compileBothTripSort());
map2.put(34l, new RouteTripSpec(34l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"4809", // Leger Transit Centre
"4069", // Bulyea Road & Burton Road S
"2209", // Southgate Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"2209", // Southgate Transit Centre
"4167", // Bulyea Road & Terwillegar Drive
"4809", // Leger Transit Centre
})) //
.compileBothTripSort());
map2.put(35l, new RouteTripSpec(35l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"4812", // Leger Transit Centre
"4935", // 156 Street & South Terwillegar Boulevard
"4367", // Rabbit Hill Road & 23 Avenue
"4215", // Century Park Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"4215", // Century Park Transit Centre
"4114", // Rabbit Hill Road & 23 Avenue
"4936", // 156 Street & 9 Avenue
"4812", // Leger Transit Centre
})) //
.compileBothTripSort());
map2.put(36l, new RouteTripSpec(36l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"4211", // Century Park Transit Centre
"4749", // !=
"4810", // <> Leger Transit Centre
"4530", // !=
"4455", // Falconer Road & Riverbend Square
"4158", // Whitemud Drive SB & 53 Avenue
"2703", // South Campus Transit Centre Fort Edmonton Park
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"2703", // South Campus Transit Centre Fort Edmonton Park
"4021", // Whitemud Drive SB & 53 Avenue
"4129", // Falconer Road & Riverbend Square
"4483", // !=
"4810", // <> Leger Transit Centre => SOUTH_CAMPUS_TC
"4804", // Leger Transit Centre
"4211", // Century Park Transit Centre
})) //
.compileBothTripSort());
map2.put(37l, new RouteTripSpec(37l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"4802", // Leger Transit Centre
"4117", // Towne Centre Boulevard & Terwillegar Boulevard
"4754", // McLay Crescent W & MacTaggart Drive
"4215", // Century Park Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"4215", // Century Park Transit Centre
"4643", // Rabbit Hill Road & Terwillegar Boulevard
"4856", // Towne Centre Boulevard & Terwillegar Boulevard
"4802", // Leger Transit Centre
})) //
.compileBothTripSort());
map2.put(38l, new RouteTripSpec(38l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"4805", // Leger Transit Centre
"4519", // !=
"4122", // !=
"4938", // !=
"4455", // !=
"4427", // ==
"4288", // ++
"4469", // ==
"4597", // != Riverbend Road & Rabbit Hill Road
"4191", // !=
"4041", // ==
"4037", // 143 Street & 53 Avenue
"4038", // ++
"4031", // ++
"4034", // 144 Street & 60 Avenue
"4279", // ==
"4040", // != Whitemud Drive SB & 53 Avenue
"2207", // Southgate Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"2207", // Southgate Transit Centre
"4020", // !=
"4041", // ==
"4037", // 143 Street & 53 Avenue
"4038", // ++
"4031", // ++
"4034", // 144 Street & 60 Avenue
"4279", // ==
"4021", // !=
"4126", // !=
"4427", // ==
"4288", // ++
"4469", // ==
"4042", // == Riverbend Road & Rabbit Hill Road
"4373", // !=
"4262", // !=
"4320", // !=
"4749", // !=
"4805", // Leger Transit Centre
})) //
.compileBothTripSort());
map2.put(39l, new RouteTripSpec(39l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RUTHERFORD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"9242", // 117 Street & Rutherford Road SW
"9685", // McMullen Green & MacEwan Road SW
"4213", // Century Park Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"4213", // Century Park Transit Centre
"9666", // 111 Street & MacEwan Road SW
"9242", // 117 Street & Rutherford Road SW
})) //
.compileBothTripSort());
map2.put(40l, new RouteTripSpec(40l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, YELLOWBIRD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"4290", // 105 Street & 21 Avenue #Yellowbird
"4118", // ++
"4206", // Century Park Transit Centre
"4224", // ++
"4054", // ++
"2203", // Southgate Transit Centre
"2211" // Southgate Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"2203", // Southgate Transit Centre
"2211", // Southgate Transit Centre
"4490", // ++
"4164", // ++
"4205", // Century Park Transit Centre
"4467", // ++
"4290" // 105 Street & 21 Avenue #Yellowbird
})) //
.compileBothTripSort());
map2.put(41l, new RouteTripSpec(41l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4208", "4168", "2213" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2213", "4486", "4208" })) //
.compileBothTripSort());
map2.put(42l, new RouteTripSpec(42l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4209", "4070", "2217" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2217", "4342", "4209" })) //
.compileBothTripSort());
map2.put(43l, new RouteTripSpec(43l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"4214", // Century Park Transit Centre
"4151", // == 111 Street & Saddleback Road N Ent
"4543", // != 112 Street & Saddleback Road North Ent
"4156", // != Saddleback Road & 27 Avenue
"4547", // != 112 Street & Saddleback Road North Ent
"4493", // != 116 Street & 30 Avenue
"4154", // == 117 Street & 28 Avenue
"2711", // South Campus Transit Centre Fort Edmonton Park
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"2711", // South Campus Transit Centre Fort Edmonton Park
"4337", // == 117 Street & 28 Avenue
"4096", // != Saddleback Road & 27 Avenue
"4166", // != 113 Street & Saddleback Road N Ent
"4566", // != 116 Street & 30 Avenue
"4245", // != 112 Street & 29A Avenue
"4088", // == 112 Street & Saddleback Road North Ent
"4214", // Century Park Transit Centre
})) //
.compileBothTripSort());
map2.put(44l, new RouteTripSpec(44l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, TWIN_BROOKS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4265",
/* + */"4233"/* + */, //
"4204", "4210", "4362", "2204" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2204", "4198", "4204",
/* + */"4348"/* + */, //
"4265" })) //
.compileBothTripSort());
map2.put(45l, new RouteTripSpec(45l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4207", "4588", "2214" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2214", "2888", "4198", "4207" })) //
.compileBothTripSort());
map2.put(46l, new RouteTripSpec(46l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HARRY_AINLAY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, YELLOWBIRD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4290", "4209", "4307" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4490", "4208", "4290" })) //
.compileBothTripSort());
map2.put(47l, new RouteTripSpec(47l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ALLARD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"9301", // Allard Boulevard & Alexander Way SW
"9163", // Callaghan Drive & Callaghan Point
"4548", // == 111 Street & 23 Avenue
"4214", // != Century Park Transit Centre
"4206" // != Century Park Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"4206", // != Century Park Transit Centre
"4214", // != Century Park Transit Centre
"4456", // == 111 Street & 23 Avenue
"9164", // Callaghan Drive & Callaghan Close
"9301" // Allard Boulevard & Alexander Way SW
})) //
.compileBothTripSort());
map2.put(48l, new RouteTripSpec(48l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BLACKBURNE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "9226",
/* + */"4002"/* + */, //
"4204" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4204",
/* + */"9551"/* + */, //
"9226" })) //
.compileBothTripSort());
map2.put(49l, new RouteTripSpec(49l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BLACKMUD_CRK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "9756", "9542", "4210" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4210",
/* + */"4105"/* + */, //
"9756" })) //
.compileBothTripSort());
map2.put(50l, new RouteTripSpec(50l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2210", "4277", "2517", "2957", "2710" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2710", "2510", "2924", "4474", "2210" })) //
.compileBothTripSort());
map2.put(51l, new RouteTripSpec(51l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, PARKALLEN) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"2795", // 112 Street & 65 Avenue nearside
"2752", // ==
"2982", // !=
"22354", // !=
"2638", // ==
"2890", // 114 Street & 89 Avenue
"2001", // University Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"2001", // University Transit Centre
/* + */"2889"/* + */, //
"2795", // 112 Street & 65 Avenue nearside
})) //
.compileBothTripSort());
map2.put(52l, new RouteTripSpec(52l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"2212", // Southgate Transit Centre
"2887", //
"2849", // 104 Street & 81 Avenue
"2632", //
"2162", // ==
"1425", // >>>>>>
"-1425", // !=
"1728", //
"1991", //
"1308", // Government Transit Centre
"1794", // ==
"1769", // !=
"1693", // !=
"1711", // !=
"1271", // !=
"1777",// ==
"1777", // 103 Street & Jasper Avenue
"11321", //
"1292", // 100 Street & 102A Avenue
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"1292", // 100 Street & 102A Avenue
"1262", //
"1620", // ==
"1673", // !=
"1964", // !=
"1949", // !=
"1708", // !=
"1941", // ==
"1305", // Government Transit Centre
"1792", //
"1629", //
"1993", //
"-1425", // !=
"1425", // <<<<<<<
"1567", // ==
"2768", //
"2899", //
"2821", // 104 Street & 82 Avenue
"2665", //
"2212" // Southgate Transit Centre
})) //
.compileBothTripSort());
map2.put(53l, new RouteTripSpec(53l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2216", "2973", "2712" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2712", "2511", "2216" })) //
.compileBothTripSort());
map2.put(54l, new RouteTripSpec(54l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2710", "2891", "2001" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2001", "2821", "2710" })) //
.compileBothTripSort());
map2.put(55l, new RouteTripSpec(55l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2202", "2830", "2709" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2709", "2966", "2202" })) //
.compileBothTripSort());
map2.put(57l, new RouteTripSpec(57l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"2002", // University Transit Centre
"2860", // ++
"2824", // ++
"1246", // ++
"1364", // ++
"1358", // 99 Street & 104 Avenue
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"1358", // 99 Street & 104 Avenue
"1608", // ++
"2659", // ++
"2891", // ++
"2890", // 114 Street & 89 Avenue
"2002", // University Transit Centre
})) //
.compileBothTripSort());
map2.put(59l, new RouteTripSpec(59l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_EDM_COMMON) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "3440", "3003", "3209" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3209", "3007", "3440" })) //
.compileBothTripSort());
map2.put(60l, new RouteTripSpec(60l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3210", "3585", "2104", "2101", "1780", "1989" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1824", "1780", "2104", "3233", "3210" })) //
.compileBothTripSort());
map2.put(61l, new RouteTripSpec(61l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3211", "3585", "2105", "2104", "1780", "1989" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1824", "1780", "2105", "3529", "3211" })) //
.compileBothTripSort());
map2.put(62l, new RouteTripSpec(62l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHWOOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"3154", // Mill Woods Road E & 20 Avenue
"3128", // !=
"3126", // ==
"3212", // Mill Woods Transit Centre
"3127", // ==
"3087", // !=
"1989", // 108 Street & 104 Avenue
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"1824", // 108 Street & 104 Avenue
"3090", // !=
"3126", // ==
"-33219", // !=
"3203", // Mill Woods Transit Centre
"3127", // ==
"3129", // !=
"3154", // Mill Woods Road E & 20 Avenue
})) //
.compileBothTripSort());
map2.put(63l, new RouteTripSpec(63l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHWOOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"3143", // 48 Street & Mill Woods Road S
"3165", // ++
"3167", // ???
"3169", // ???
"3171", // ???
"3173", // ???
"3254", // ???
"3148", // ???
"3146", // ???
"3144", // ???
"3142", // ???
"3140", // ???
"3065", // ???
"3067", // ???
"3069", // ???
"3071", // ???
"3073", // ???
"3075", // ???
"3077", // ???
"3079", // ???
"3081", // ???
"3083", // ???
"3085", // ???
"3130", // ???
"3128", // !=
"3126", // ==
"3204", // == Mill Woods Transit Centre
"3212", // != Mill Woods Transit Centre
"3127", // ==
"3087", // !=
"1358", // 99 Street & 104 Avenue
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"1358", // 99 Street & 104 Avenue
"3090", // !=
"3126", // ==
"3204", // == Mill Woods Transit Centre
"3127", // ==
"3129", // !=
"3141", // !=
"3143", // 48 Street & Mill Woods Road S
})) //
.compileBothTripSort());
map2.put(64l, new RouteTripSpec(64l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KNOTTWOOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3026", "3006", "3001", "3208", "2111", //
"1246", "1609", "1364", //
"1358" //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1358", //
"1609", "1570", "1608", //
"2112", "3208", "3009", "3026" //
})) //
.compileBothTripSort());
map2.put(65l, new RouteTripSpec(65l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KNOTTWOOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3023", "3006", "3001", "3208", "2111", "1989" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1824", "2112", "3208", "3009", "3023" })) //
.compileBothTripSort());
map2.put(66l, new RouteTripSpec(66l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3213", "3011", "2101", "2105", //
"1246", "1609", "1364", //
"1358" //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1358", //
"1609", "1570", "1608", //
"2101", "3011", "3003", "3213" //
})) //
.compileBothTripSort());
map2.put(67l, new RouteTripSpec(67l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MEADOWS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "3206", "3952", "3957", "3708" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3708", "3957", "3950", "3311", "3116", "3206" })) //
.compileBothTripSort());
map2.put(68l, new RouteTripSpec(68l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), Arrays.asList(new String[] { "3202", "3399", "3586", "2107", "2110", "1989" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), Arrays.asList(new String[] { "1824", "2107", "3230", "3584", "3202" })) //
.compileBothTripSort());
map2.put(69l, new RouteTripSpec(69l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"3214", // Mill Woods Transit Centre
"3695", // ==
"3400", //
"3506", // !=
"3702", // == Meadows Transit Centre
"3705", // == Meadows Transit Centre
"3124", //
"3722", // !=
"2024", // !=
"2110", // Millgate Transit Centre => MILL_WOODS_TC
"2107", // Millgate Transit Centre => DOWNTOWN
"2026", // ++
"1989" // 108 Street & 104 Avenue
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"2110", // Millgate Transit Centre
"2371", // !=
"3953", // !=
"3710", // == Meadows Transit Centre
"3611", //
"3653", // !=
"3411", // ==
"3214" // Mill Woods Transit Centre
})) //
.compileBothTripSort());
map2.put(70l, new RouteTripSpec(70l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"3201", // Mill Woods Transit Centre
"2697", // == 99 Street & 82 Avenue
"2659", // != 99 Street & 82 Avenue STOP
"2824", // != 99 Street & 83 Avenue CONTINUE
"1190", // == McDougall Hill & Grierson Hill
"1262", // != 100 Street & Jasper Avenue
"1292", // != 100 Street & 102A Avenue
"1457", // != 100 Street & Jasper Avenue
"1780", // != 103 Street & 102 Avenue
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"1292", // != 100 Street & 102A Avenue
"1780", // != 103 Street & 102 Avenue
"1322", // != 103 Street & Jasper Avenue
"1336", // != 101 Street & Jasper Avenue
"1542", // == 100 Street & Jasper Avenue
"2878", // != 99 Street & 85 Avenue
"2659", // != 99 Street & 82 Avenue
"2840", // == 99 Street & 81 Avenue
"3201" // Mill Woods Transit Centre
})) //
.compileBothTripSort());
map2.put(71l, new RouteTripSpec(71l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, GOV_CTR, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"3216", // Mill Woods Transit Centre
"3337", // ++
"1153", // 106 Street & 97 Avenue
"1614", // 109 Street & 97 Avenue
"1303", // Government Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"1303", // Government Transit Centre
"1993", // 106 Street & 97 Avenue
"3543", // ++
"3216", // Mill Woods Transit Centre
})) //
.compileBothTripSort());
map2.put(72l, new RouteTripSpec(72l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, // MILLGATE
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3206", "3255", "3796", "3491", "2106", "2106", "2110", "1989" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1989", "2110", "2106", "3355", "3748", "3185", "3206" })) //
.compileBothTripSort());
map2.put(73l, new RouteTripSpec(73l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"2203", // Southgate Transit Centre
"2211", // Southgate Transit Centre
"2888", "2102", "3002", //
"3205" // Mill Woods Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"3205", // Mill Woods Transit Centre
"3010", "2109", //
"2203", // Southgate Transit Centre
"2211" // Southgate Transit Centre
})) //
.compileBothTripSort());
map2.put(74l, new RouteTripSpec(74l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "2204", "4202",
/* + */"3671"/* + */, //
"3107", "3559", "3209" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3209", "3007", "3430", "3110", "4202", "4212", "2204" })) //
.compileBothTripSort());
map2.put(77l, new RouteTripSpec(77l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "4210", "9850", "9111", "3205" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3205", "9761", "9361", "4210" })) //
.compileBothTripSort());
map2.put(78l, new RouteTripSpec(78l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "4205", "3675", "9384", "9725", "3215" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3215", "9147", "9387", "3865", "4205" })) //
.compileBothTripSort());
map2.put(79l, new RouteTripSpec(79l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "4207", "3319", "9260", "9139", "3214" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3214", "9287", "9671", "3513", "4207" })) //
.compileBothTripSort());
map2.put(80l, new RouteTripSpec(80l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "2218", "2769", "2826", "2551", "2599", "2223", "2305" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2305", "2152", "2264", "2188", "2622", "2837", "2888", /* + */"2630"/* + */, "2218" })) //
.compileBothTripSort());
map2.put(81l, new RouteTripSpec(81l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3216", "2106", "2338", //
/* + */"2697"/* + */, "2659",/* + */"2824"/* + */, //
/* + */"1246"/* + */, "1383", //
"1246", "1609", "1364", //
"1358" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1358", //
"1609", "1570", "1608", //
"1383", /* + */"2835"/* + */, //
/* + */"2878"/* + */, /* ? */"2659"/* ? */, "2840", //
"2385", "2106", "2104", "3216" })) //
.compileBothTripSort());
map2.put(82l, new RouteTripSpec(82l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3212", "2339", "2551", "1383", //
"1246", "1609", "1364", //
"1358" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1358", //
"1609", "1570", "1608", //
"1383", "2255", "2528", "3212" })) //
.compileBothTripSort());
map2.put(83l, new RouteTripSpec(83l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MEADOWS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"1383", //
"1542", // !=
"2196", // ==
"2393", // ==
"2952", // !=
"2188", // ==
"2572", // !=
"2805", //
"2911", // ==
"2536", "2235", // !=
"2362", "2136", // !=
"2078", // ==
"2034", // !=
"2143", // ==
"2286", // !=
"2943", "2813", // !=
"2431", // ==
"2468", // ==
"2415", // !=
"2693", "2259", // ==
"22189", // !=
"3706" //
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"3706", //
"22349", // !=
"22188", // ==
"2693", "2259", // ==
"22178", // !=
"2389", // ==
"2148", "2913", // !=
"2357", "2598", // !=
"2802", // ==
"2804", "2551", //
"2329", // !=
"2196", // ==
"1457", // !=
"1383" //
})) //
.compileBothTripSort());
map2.put(84l, new RouteTripSpec(84l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2111", "2303" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2303", "2112" })) //
.compileBothTripSort());
map2.put(85l, new RouteTripSpec(85l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"1383", //
"2434", // ==
"2059", // !=
"2985", "2560", // 1=
"2379", // ==
"2307" //
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"2307", "1383", //
})) //
.compileBothTripSort());
map2.put(86l, new RouteTripSpec(86l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1358", //
"1609", "1570", "1608", //
"2073", "2302" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2302", "2442", //
"1246", "1609", "1364", //
"1358" })) //
.compileBothTripSort());
map2.put(87l, new RouteTripSpec(87l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2106", "2338", "2824", "1383", //
"1246", "1609", "1364", //
"1358" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1358", //
"1609", "1570", "1608", //
"1383", "2285", "2385", "2106" })) //
.compileBothTripSort());
map2.put(88l, new RouteTripSpec(88l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1680", "1336", "2274", "2449", "2307" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2307", "2298", "2267", "1718" })) //
.compileBothTripSort());
map2.put(89l, new RouteTripSpec(89l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MEADOWS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, TAMARACK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"3691", // Tamarack Green & 35 Ave
"3608", // ++
"3610", // ++
"3192", // ++
"3193", // !=
"3505", // !=
"3193", // ++
"3979", // Maple Rd & Loop
"3773", // ++
"3781", // !=
"3613", // Tamarack Way & 38 Ave
"3711", // Meadows TC
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"3711", // Meadows TC
"3851", // 19 St & 35 Ave
"3605", // ++
"3691" // Tamarack Green & 35 Ave
})) //
.compileBothTripSort());
map2.put(90l, new RouteTripSpec(90l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MEADOWS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1824", "2255", "3707" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3709", "2551", "1989" })) //
.compileBothTripSort());
map2.put(91l, new RouteTripSpec(91l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHLANDS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2307",
/* + */"2425"/* + */, //
"1371" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1371", "1131", "2307" })) //
.compileBothTripSort());
map2.put(92l, new RouteTripSpec(92l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, PLYPOW, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "2101", "2118", "2876", /* + */"22330"/* + */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /*-"2876"-*//* + */"22330"/* + */, /* + */"22196"/* + */, "2118", "2101" })) //
.compileBothTripSort());
map2.put(94l, new RouteTripSpec(94l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"2002", // University Transit Centre
"2860", // ++
"2447", // ++
"2274", // ++
"2449", // ++
"2303", // Capilano Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"2303", // Capilano Transit Centre
"2298", // ++
"2591", // ++
"2159", // ++
"2891", // ++
"2752", // ==
"2982", // != 114 Street & 83 Avenue
"22354", // !=
"2638", // ==
"2002", // University Transit Centre
})) //
.compileBothTripSort());
map2.put(95l, new RouteTripSpec(95l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LAUREL, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"3213",
/* + */"3189"/* + */, //
/* + */"3952"/* + */, //
/* + */"3618"/* + */, //
/* + */"3303"/* + */, //
"3305", "3703" //
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"3703", "3303",
/* + */"3761"/* + */, //
/* + */"3620"/* + */, //
"3213" //
})) //
.compileBothTripSort());
map2.put(96l, new RouteTripSpec(96l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BONNIE_DOON, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2110", /* + */"2433"/* + */, "2196" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2196", /* + */"2074"/* + */, "2110" })) //
.compileBothTripSort());
map2.put(97l, new RouteTripSpec(97l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NAIT, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3217", "3008", "2111", "1702", "1059" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1093", "1824", "2112", "3002", "3217" })) //
.compileBothTripSort());
map2.put(98l, new RouteTripSpec(98l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NAIT, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5219", "1059" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1093", "5003" })) //
.compileBothTripSort());
map2.put(99l, new RouteTripSpec(99l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELVEDERE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2304", "1206", "7211" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7211", "1207", "2304" })) //
.compileBothTripSort());
map2.put(100l, new RouteTripSpec(100l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1243", "1812", /* + */"5449"/* + */, /* + */"5001"/* + */, "5010", "8610" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "8610", "5001", /* + */"5054"/* + */, "1083", "1256", "1243" })) //
.compileBothTripSort());
map2.put(101l, new RouteTripSpec(101l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEDGEWOOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* + */"5968"/* + */, "5908", "5821", "5002" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5002", "5979", "5660", /* + */"5968"/* + */})) //
.compileBothTripSort());
map2.put(102l, new RouteTripSpec(102l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LYMBURN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5828", "5725", "5004" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5004", "5755", "5828" })) //
.compileBothTripSort());
map2.put(103l, new RouteTripSpec(103l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAMERON_HTS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5752", "5695", "5821", "5002" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5002", "5979", "5623", "5752" })) //
.compileBothTripSort());
map2.put(104l, new RouteTripSpec(104l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LYMBURN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /*-"5755",-*/"5828", /* + */"5725"/* + */, "5821", "2706" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2706", /*-"5725"-,*//* + */"5755"/* + */,/* + */"5828"/* + */})) //
.compileBothTripSort());
map2.put(105l, new RouteTripSpec(105l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LESSARD) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* + */"5017"/* + */, /* + */"5932"/* + */, /* "-5634-", */"5733", "5821", "2706" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2706", /* "-5932-", *//* + */"5634"/* + */,/* + */"5017"/* + */})) //
.compileBothTripSort());
map2.put(106l, new RouteTripSpec(106l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LESSARD) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"5733", // 172 Street & Callingwood Road
"5650", //
"5900", "5757", "5722", "5638", "5671", "5974", "5821", "5749", "5923", "5750", //
"5463", //
"5004", // West Edmonton Mall Transit Centre END
"5007", // West Edmonton Mall Transit Centre CONTINUE
"5054", //
"5186", "5486", "5566", "5578", "5359", "5281", "5197", "5332", "5451", "5499", "5298", "4425", "22162", "2978", //
"22159", //
"2713", // South Campus Transit Centre Fort Edmonton Park
"2885", //
"22157", "2959", "2944", "2505", "2516", //
"2748", // ==
"2982", // !=
"22354", // !=
"2638", // ==
"2625", // ++
"2890", // 114 Street & 89 Avenue
"2001", // University Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"2001", // University Transit Centre
"2641", // ++
"5004", // West Edmonton Mall Transit Centre
"5733", // 172 Street & Callingwood Road
})) //
.compileBothTripSort());
map2.put(107l, new RouteTripSpec(107l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTRIDGE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5747", "5657", "5005" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5005", "5877", "5747" })) //
.compileBothTripSort());
map2.put(108l, new RouteTripSpec(108l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BRECKENRIDGE_GRNS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "8670", "8279", "8608" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "8608", "8999", "8670" })) //
.compileBothTripSort());
map2.put(109l, new RouteTripSpec(109l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5012", "5874", /* + */"5366"/* + */, "5111", /* + */"5250"/* + */, "5344", "1496" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1571", "5132", "5111", "5903", "5012" })) //
.compileBothTripSort());
map2.put(110l, new RouteTripSpec(110l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTRIDGE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5005", "5877", "5747" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5747", "5811", "5811", "5005" })) //
.compileBothTripSort());
map2.put(111l, new RouteTripSpec(111l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5001", "5795", "5109", "1620" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1620", "5104", "5846", "5001" })) //
.compileBothTripSort());
map2.put(112l, new RouteTripSpec(112l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5013", "5433", "5344", "1910",
/* + */"1824"/* + */, //
"1542", "2122", "2302" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2302", "2497", "1457",
/* + */"1989"/* + */, //
"1878", "5132", "5038", "5013" })) //
.compileBothTripSort());
map2.put(113l, new RouteTripSpec(113l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5001", "5069", "5104" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5104", "5151", "5001" })) //
.compileBothTripSort());
map2.put(114l, new RouteTripSpec(114l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTVIEW_VLG) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "8846", "8941", "5105" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5105", "8904", "8849", "8846" })) //
.compileBothTripSort());
map2.put(115l, new RouteTripSpec(115l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"5013", "5433", "5344", "5209", //
"5549", // ==
"1759", // !=
"1867", // !=
"1665", // !=
"6122", // ==
"6333", "7011" //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"7011", //
"-77862", //
"6348", //
"6369", "6289", // ==
"5173", // !=
"6372", // !=
"1932", // !=
"5090", // ==
"5203", "5132", "5038", "5013"//
})) //
.compileBothTripSort());
map2.put(117l, new RouteTripSpec(117l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, THE_HAMPTONS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"5010", // West Edmonton Mall Transit Centre
"5819", // != 189 Street & 87 Avenue
"8607", // <> Lewis Farms Transit Centre
"8536", // != West Henday Promenade Access & Webber Greens Drive
"8135", // ++ Guardian Road & Whitemud Drive
"8106", // 199 Street & 62 Avenue
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"8106", // 199 Street & 62 Avenue
"8390", // == 199 Street & Pipeline Compressor Station
"8034", // ?? 199 Street & Christian Assembly Driveway
"8430", // ?? 199 Street & Fieldstone Estates Driveway
"8361", // == 199 Street & 69 Avenue
"8033", // ++ Guardian Road & Whitemud Drive
"8406", // == != Suder Greens Drive & Webber Greens Drive
"8607", // != <> Lewis Farms Transit Centre => THE_HAMPTONS
"8605", // !=Lewis Farms Transit Centre => WEST_EDM_MALL
"5783", // == 187 Street & 87 Avenue
"5010", // West Edmonton Mall Transit Centre
})) //
.compileBothTripSort());
map2.put(118l, new RouteTripSpec(118l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RIO_TERRACE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5174", "5302", "5103" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5103", "5301", "5174" })) //
.compileBothTripSort());
map2.put(119l, new RouteTripSpec(119l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, THE_HAMPTONS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "8583", "8097", "8033", "8607" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "8607", "8135", "8097", "8046", "8583" })) //
.compileBothTripSort());
map2.put(120l, new RouteTripSpec(120l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STADIUM, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5110", "1242", "1083", "1336", "1407" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1407", "1328", "1620", "1746", "5110" })) //
.compileBothTripSort());
map2.put(121l, new RouteTripSpec(121l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5205", "5215", "6345", "6646", "7011" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7011", "6183", "6371", "5404", "5205" })) //
.compileBothTripSort());
map2.put(122l, new RouteTripSpec(122l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5012", "8389", "5928", "5330", "5207" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5207", "5143", "5389", "8952", "5012" })) //
.compileBothTripSort());
map2.put(123l, new RouteTripSpec(123l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5105", "8691", "5648", "5374", "5205" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5205", "5692", "5635", "8684", "5105" })) //
.compileBothTripSort());
map2.put(124l, new RouteTripSpec(124l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE) // MISTATIM_IND
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"5106", "6231", "5204" //
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"5204", "6781", "5106" //
})) //
.compileBothTripSort());
map2.put(125l, new RouteTripSpec(125l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELVEDERE, // DOWNTOWN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"5101", // Jasper Place Transit Centre
"5469", // !=
"5448",// 161 Street & 109 Avenue
"5127", // !=
"5202", // == Westmount Transit Centre
"5098", // !=
"11326", // ==
"1105", // == Kingsway RAH Transit Centre LAST
"1107", // Kingsway RAH Transit Centre
"1401", // Stadium Transit Centre
"1044", // ==
"1209", // == Coliseum Transit Centre LAST
"1205", // Coliseum Transit Centre
"7205", // Belvedere Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"7205", // Belvedere Transit Centre
"1357", // !=
"1209", // Coliseum Transit Centre
"1148", // !=
"1402", // Stadium Transit Centre
"1032", // !=
"1105", // == Kingsway RAH Transit Centre
"1053", // !=
"5077", // ==
"5202", // == Westmount Transit Centre LAST
"5209", // Westmount Transit Centre
"5112", // !=
"5101", // Jasper Place Transit Centre
})) //
.compileBothTripSort());
map2.put(126l, new RouteTripSpec(126l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5013", "8882", "8590", "5928", "5208" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5208", "5389", "8500", "8952", "5013" })) //
.compileBothTripSort());
map2.put(127l, new RouteTripSpec(127l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELVEDERE, // 7205
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT) // 5204
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5204", "1110", "1401", "1209", "1205", "7205" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7205", "1209", "1402", "1110", "1105", "5204" })) //
.compileBothTripSort());
map2.put(128l, new RouteTripSpec(128l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY) //
.addTripSort(MDirectionType.NORTH.intValue(), // CASTLE_DOWNS
Arrays.asList(new String[] { //
"2890", // 114 Street & 89 Avenue
"2002", // University Transit Centre
"2638", // 114 Street & 85 Avenue
"5206", // Westmount Transit Centre
"6191", // !=
"6333", // <> 127 Street & 129 Avenue
"6553", // !=
"6458", // !=
"6006", // Castle Downs Transit Centre END >> UNIVERSITY
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), // UNIVERSITY
Arrays.asList(new String[] { //
"6006", // Castle Downs Transit Centre
"6137", // !=
"6366", // ++ 127 Street & 131 Avenue
"6333", // <> 127 Street & 129 Avenue
"6435", // !=
"6369", // 127 Street & 129 Avenue
"6289", // ++
"2890", // 114 Street & 89 Avenue
})) //
.compileBothTripSort());
map2.put(129l, new RouteTripSpec(129l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5012", "8740", "8740", "5960", "5208" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5208", "5936", "8740", "5012" })) //
.compileBothTripSort());
map2.put(130l, new RouteTripSpec(130l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"2002", // University Transit Centre
"1700", // ++
"1532", // 106 Street & 118 Avenue Loop
"1476", // ++
"7002", // Northgate Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"7002", // Northgate Transit Centre
"1532", // 106 Street & 118 Avenue Loop
"1855", // ++
"2890", // 114 Street & 89 Avenue
"2002", // University Transit Centre
})) //
.compileBothTripSort());
map2.put(133L, new RouteTripSpec(133L, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY, // S_CAMPUS_FT_EDM
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"8606", // Lewis Farms Transit Centre
"8602", // Lewis Farms Transit Centre
"5001", // West Edmonton Mall Transit Centre
"2748", // ==
"2982", // !=
"22354", // !=
"2638", // ==
"2890", // 114 Street & 89 Avenue
"2002", // University Transit Centre
"2890", // 114 Street & 89 Avenue
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"2890", // 114 Street & 89 Avenue
"2002", // University Transit Centre
"5010", // West Edmonton Mall Transit Centre
"8602", // Lewis Farms Transit Centre
"8606", // Lewis Farms Transit Centre
})) //
.compileBothTripSort());
map2.put(134l, new RouteTripSpec(134l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "1251", "1237", "7002" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7002", "1372", "1251" })) //
.compileBothTripSort());
map2.put(136L, new RouteTripSpec(136L, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, THE_HAMPTONS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"8583", // 215 Street & Hope Road
"8089", // Glastonbury Boulevard & 69 Avenue
"8033", // ++
"8602", // ++
"5010", // West Edmonton Mall Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"5010", // West Edmonton Mall Transit Centre
"8609", // ++
"8135", // ++
"8177", // ++
"8046", // 199 Street & 62 Avenue
"8583", // 215 Street & Hope Road
})) //
.compileBothTripSort());
map2.put(137l, new RouteTripSpec(137l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5010", "8882", "6850", /* + */"7011" /* + */, "7002", "7908" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7908", "7011", "6118", "8861", "5010" })) //
.compileBothTripSort());
map2.put(138l, new RouteTripSpec(138l, // TODO not exactly: same loop for the 2 trips
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEDGEWOOD) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /*-"5627"-*//* + */"5968"/* + */, /* + */"5888"/* + */, /* + */"5789"/* + */, //
"5983", "5747", "2707" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2707", "5747", "5719",//
/* + */"5627"/* + */, /* + */"5858"/* + */, /* + */"5968"/* + *//*-"5789"-*/})) //
.compileBothTripSort());
map2.put(139l, new RouteTripSpec(139l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, THE_HAMPTONS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "8457", "8106", "8033", "2707" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2707", "8135", "8457", "8460" })) //
.compileBothTripSort());
map2.put(140l, new RouteTripSpec(140l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LAGO_LINDO, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "1251", "1040", "7003", "7010",
/* + */"7748"/* + */, //
"7377" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7377",
/* + */"7042"/* + */, //
"7003", "1380", "1251" })) //
.compileBothTripSort());
map2.put(141l, new RouteTripSpec(141l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, COLISEUM) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1204", "1561", "1002", "1003" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1002", "1003", "1031", "1204" })) //
.compileBothTripSort());
map2.put(142l, new RouteTripSpec(142l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, COLISEUM) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1207", "1521", "1001" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1001", "1367", "1207" })) //
.compileBothTripSort());
map2.put(143l, new RouteTripSpec(143l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MONTROSE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, KINGSWAY) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1111", "1476", "1441", "1205", "1260" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1260", "1213", "1278", "1075", "1111" })) //
.compileBothTripSort());
map2.put(145l, new RouteTripSpec(145l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LAGO_LINDO, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, _82_ST_132_AVE) // EAUX_CLAIRES
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"6315", "7377",
/* + */"7388"/* + */
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
/* + */"7388"/* + */, //
/* + */"7483"/* + */, //
"6315", "6317", "7358", "7165" //
})) //
.compileBothTripSort());
map2.put(149l, new RouteTripSpec(149l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "6308", "7736", "7113", "7904" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7904", "7153", "7959", "6308" })) //
.compileBothTripSort());
map2.put(150l, new RouteTripSpec(150l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"5007", "5107", //
"5207", //
"5549", // ==
"1759", // !=
"1867", // !=
"1665", // !=
"6122", // ==
"6333", "7011", "7010", "6303" //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"6303", "7011", //
"-77862", //
"6369", //
"6289", // ==
"5173", // !=
"6372", // !=
"1932", // !=
"5090", // ==
"5203", "5102", "5007" //
})) //
.compileBothTripSort());
map2.put(151l, new RouteTripSpec(151l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KING_EDWARD_PK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { // CASTLE_DOWNS
"2253", // 71 Street & 77 Avenue
"2432", // 91 Street & 82 Avenue
"1251", // == 102 Street & MacDonald Drive
"1346", // 101 Street & 101A Avenue
"1237", // 101 Street & 117 Avenue
"1043", // != 97 St & Yellowhead Tr Nearside
"6496", // == 97 Street & 128 Avenue
"6421", // != 102 Street & 127 Avenue
"6571", // ==
"6333", // !=
"6553", // !=
"6020", // !=
"6434", // !=
"6292", // != 127 Street & 129 Avenue LAST
"6328", // !=
"6132", // !=
"6487", // ==
"6333", // 127 Street & 129 Avenue
"6004", // Castle Downs Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { // KING_EDWARD_PK
"6004", // Castle Downs Transit Centre
"6366", // !=
"6292", // 127 Street & 129 Avenue
"6123", // !=
"6116", // == 103 Street & 127 Avenue
"6496", // == 97 Street & 128 Avenue LAST
"6266", // 101 Street & 128 Avenue
"1372", // 101 Street & 117 Avenue
"1243", // == 101 Street & 101A Avenue
"1251", // == 102 Street & MacDonald Drive LAST
"1142", // 101 Street & MacDonald Drive nearside CONTINUE
"2079", // 91 Street & 83 Avenue
"2253", // 71 Street & 77 Avenue
})) //
.compileBothTripSort());
map2.put(152l, new RouteTripSpec(152l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELVEDERE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7003", "7074", "7208" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7208", "7221", "7003" })) //
.compileBothTripSort());
map2.put(153l, new RouteTripSpec(153l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELVEDERE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7008", /* + */"7143"/* + */, "7204" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7204", /* + */"7043"/* + */, "7008" })) //
.compileBothTripSort());
map2.put(154l, new RouteTripSpec(154l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELVEDERE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7009", "7592", "7202" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7202", "7123", "7009" })) //
.compileBothTripSort());
map2.put(155l, new RouteTripSpec(155l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, RAPPERSWILL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "6823", /* + */"6416"/* + */, "6313" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "6313", /* + */"6078"/* + */, "6823" })) //
.compileBothTripSort());
map2.put(157l, new RouteTripSpec(157l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, REMAND_CTR) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "6379",
/* + */"6077"/* + */, //
"6302" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "6302",
/* + */"6720"/* + */, //
"6379" })) //
.compileBothTripSort());
map2.put(160l, new RouteTripSpec(160l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, OXFORD, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, GOV_CTR) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "1304", "1820", "6348", "6243", "6835", "6676" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "6835", "6676", "6442", "6594", "1304" })) //
.compileBothTripSort());
map2.put(161l, new RouteTripSpec(161l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MAC_EWAN_GOV_CTR) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"1309", // != Government Transit Centre START
"1035", // !=
"1824", // != 108 Street & 104 Avenue START
"1845", // !=
"1271", // ==
"7579", // !=
"7009", // <> Northgate Transit Centre
"66112", // !=
"6580", // ++
"6007", // Castle Downs Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"6007", // Castle Downs Transit Centre
"6396", // ++
"6141", // == !=
"7009", // != <> Northgate Transit Centre => CASTLE_DOWNS
"7003", // != Northgate Transit Centre
"1673", // ==
"1740", // !=
"1989", // != 108 Street & 104 Avenue END
"1622", // !=
"1309", // !≃ Government Transit Centre END
})) //
.compileBothTripSort());
map2.put(162l, new RouteTripSpec(162l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MAC_EWAN_GOV_CTR) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"1309", // != Government Transit Centre START
"1035", // !=
"1824", // != 108 Street & 104 Avenue START
"1845", // !=
"1271", // ==
"7579", // !=
"6311", // <> Eaux Claires Transit Centre
"6033", // ++
"6008", // Castle Downs Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"6008", // Castle Downs Transit Centre
"6340", // ++
"6362", // ==
"6311", // != <> Eaux Claires Transit Centre => CASTLE_DOWNS
"6310", // != Eaux Claires Transit Centre
"1622", // ==
"1740", // !=
"1989", // != 108 Street & 104 Avenue END
"1964", // !=
"1309", // != Government Transit Centre END
})) //
.compileBothTripSort());
map2.put(163l, new RouteTripSpec(163l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CHAMBERY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"6312", /* + */"7463"/* + */, /* + */"7748"/* + */, //
/* + */"7381"/* + */, "6194", /* + */"6767"/* + */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
/* + */"6767"/* + */, "6598", /* + */"6854"/* + */, /* + */"6147"/* + */, /* + */"6362"/* + */, //
/* + */"6074"/* + */, /* + */"6076"/* + */, "6236", /* + */"7482"/* + */, "6312" })) //
.compileBothTripSort());
map2.put(164l, new RouteTripSpec(164l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RAPPERSWILL, // CANOSSA, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"7015", // == Northgate Transit Centre
"66112", // !=
"6612", // !=
"6148", // !=
"6235", // !=
"6468", // ++
"6356", // ==
"6001", // Castle Downs Transit Centre
"6783", // ==
"6949", // !=
"6205", // <> 115 Street & 175 Avenue
"6950", // !=
"6202" // 127 Street & 167 Avenue
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"6202", // 127 Street & 167 Avenue
"6871", // !=
"6105", // !=
"6205", // <>
"6338", // !=
"6184", // ==
"6575", // ==
"6340", // !=
"6584", // !=
"6077", // !=
"6236", // !=
"6021", // !=
"6225", // ==
"6010", // Castle Downs Transit Centre
"6101", // ==
"6478", // !=
"6588", // == Griesbach Road & 146 Avenue
"6404", // != Sir Arthur Currie Way & Greisbach Road
"6141", // != 102 Street & 137 Avenue
"6125", // != 104 Street & Griesbach Road
"6361", // != 97 Street & 144 Avenue
"7015", // == Northgate Transit Centre
})) //
.compileBothTripSort());
map2.put(165l, new RouteTripSpec(165l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, _85_ST_132_AVE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "7496", "6130", "6522", "6011", "6127" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "6074", "6010", "6396", "6579", "7299" })) //
.compileBothTripSort());
map2.put(166l, new RouteTripSpec(166l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, GRIESBACH) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"6112", // Pegasus Boulevard & Stan Walters Avenue
/* + */"6612"/* + */, //
"7015" // Northgate Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"7015", // Northgate Transit Centre
/* + */"6260"/* + */, //
"6112" // Pegasus Boulevard & Stan Walters Avenue
})) //
.compileBothTripSort());
map2.put(167l, new RouteTripSpec(167l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS + SLASH + _82_ST, // Castle Downs-82 St
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, _95_ST_132_AVE) // 95A Street & 132 Avenue
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
/* no stops */
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"6039", // 100 Street & 158 Avenue
"6317", // Eaux Claires Transit Centre
"7353", // 87 Street & 144 Avenue
"7060", // 95A Street & 132 Avenue
})) //
.compileBothTripSort());
map2.put(168l, new RouteTripSpec(168l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "7011", "6243", "6619", "6835", //
"6725", //
"6003", "6305" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "6305", "6011", //
/* + */"6228"/* + */, //
/* + */"6698"/* + */, //
/* + */"6725"/* + */, //
/* + */"6256"/* + */, //
/* + */"6566"/* + */, //
/* + */"6261"/* + */, //
/* + */"6114"/* + */, //
"6676", "6853", "6442", "7011" })) //
.compileBothTripSort());
map2.put(169l, new RouteTripSpec(169l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CANOSSA, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"7015", // == Northgate Transit Centre
"7448", "6286", // !=
"6148", "6468", // !=
"6356", // ==
"6001", "6166", "6194", //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"6194", "6456", "6010", //
"6101", // ==
"6478", "6343", // !=
"6460", "6536", // !=
"6361", // ==
"7015", // Northgate Transit Centre
})) //
.compileBothTripSort());
map2.put(180l, new RouteTripSpec(180l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"1824", "6304", "7736", "7456", "7207", "7642", "1002" //
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1002", "7639", "7203", "7384", "7959",
/* + */"6304"/* + */, //
"6317",
/* + */"6594"/* + */, //
/* + */"1850"/* + */, //
"1989" //
})) //
.compileBothTripSort());
map2.put(181l, new RouteTripSpec(181l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELVEDERE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7206", //
/* + */"7650"/* + */, //
/* + */"7186"/* + */, //
"7384", "7241", "7604", "7901" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7901", "7414", "7400", "7456", //
/* + */"7164"/* + */, //
/* + */"7479"/* + */, //
/* + */"7650"/* + */, //
/* + */"7265"/* + */, //
/*-"7186",-*///
"7206" })) //
.compileBothTripSort());
map2.put(182l, new RouteTripSpec(182l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, FRASER, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7003", "7186", "7104", "7470" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7470", "7105", "7572", "7003" })) //
.compileBothTripSort());
map2.put(183l, new RouteTripSpec(183l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "1002", "7668", "7885", "7102" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7102", "7983", "7729", "1002" })) //
.compileBothTripSort());
map2.put(184l, new RouteTripSpec(184l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EVERGREEN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "7903", "7262", "7128" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7128", "7262", "7903" })) //
.compileBothTripSort());
map2.put(185l, new RouteTripSpec(185l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "1002",
/* + */"7954"/* + */, //
"7102" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7102",
/* + */"7744"/* + */, //
"1002" })) //
.compileBothTripSort());
map2.put(186l, new RouteTripSpec(186l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, FRASER, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7358", "7286", "7206", "7104", "7470" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7470", "7105", "7205", "7120", "7011" })) //
.compileBothTripSort());
map2.put(187l, new RouteTripSpec(187l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, KERNOHAN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LONDONDERRY) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7230", "7103", "7756", "7943" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7943", "7103", "7102", "7185" })) //
.compileBothTripSort());
map2.put(188l, new RouteTripSpec(188l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LONDONDERRY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "6309", "7230", "7186", "7907", "7729" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7668", "7904", "7549", "7185", "7188", "6309" })) //
.compileBothTripSort());
map2.put(190l, new RouteTripSpec(190l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "6306", "7763", "7803", "7054", "7906" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7906", "7384", "7815", "7674", "6306" })) //
.compileBothTripSort());
map2.put(191l, new RouteTripSpec(191l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KLARVATTEN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "6307", //
/* + */"7865"/* + */, //
"7827" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
/* + */"7827"/* + */, //
/* + */"7825"/* + */, //
"7434", //
/* + */"7795"/* + */, //
"7779", "6307" })) //
.compileBothTripSort());
map2.put(192l, new RouteTripSpec(192l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BRINTNELL, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "7909",
/* + */"7512"/* + */, //
"7984" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7984",
/* + */"7603"/* + */, //
"7909" })) //
.compileBothTripSort());
map2.put(193l, new RouteTripSpec(193l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BRINTNELL, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "7910",
/* + */"7992"/* + */, //
"7414" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7414",
/* + */"77280"/* + */, //
"7910" })) //
.compileBothTripSort());
map2.put(194l, new RouteTripSpec(194l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SCHONSEE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "6308", "7677", "7919" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7919", "7674", "6308" })) //
.compileBothTripSort());
map2.put(195l, new RouteTripSpec(195l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_CONACHIE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"7907", // West Clareview Transit Centre
"7879", // ++
"7308" // 59A Street & McConachie Way
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"7308", // 59A Street & McConachie Way
"77335", // ==
"77428", // !=
"7018", // McConachie Boulevard & 176 Avenue
"77607", // !=
"77424", // ==
"77436", // ==
"7907", // West Clareview Transit Centre
})) //
.compileBothTripSort());
map2.put(197l, new RouteTripSpec(197l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EDM, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SPRUCE_GRV) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "8737", "8785", "8761", "5415", //
/* + */"1595"/* + */, //
"1223", "1850", "1479" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1570", "1679", "1227", //
/* + */"1187"/* + */, //
"5389", "8730", "8743", "8737" })) //
.compileBothTripSort());
map2.put(198l, new RouteTripSpec(198l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, FT_SASKATCHEWAN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7908",
/* + */"77175"/* + */, //
"7405" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7405", "7926", "7908" })) //
.compileBothTripSort());
map2.put(199l, new RouteTripSpec(199l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EDM_GARRISON, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "6316",
/* + */"7873"/* + */, //
"7895" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7895", "7873", "6316" })) //
.compileBothTripSort());
map2.put(211l, new RouteTripSpec(211l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW_WEST_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1643", "1321", "7903" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7903", "1620", "1643" })) //
.compileBothTripSort());
map2.put(301l, new RouteTripSpec(301l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"4204", // Century Park Transit Centre
"4065", "4547", "4186", //
"2203", // Southgate Transit Centre
"2211" // Southgate Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"2203", // Southgate Transit Centre
"2211", // Southgate Transit Centre
"4275", "4543", "4443", //
"4204", // Century Park Transit Centre
})) //
.compileBothTripSort());
map2.put(302l, new RouteTripSpec(302l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EVERGREEN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "7103",
/* + */"7689",/* + *///
"7262", "7654", "7128" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7128", "7654", "7591",
/* + */"7855",/* + *///
"7103" })) //
.compileBothTripSort());
map2.put(303l, new RouteTripSpec(303l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MISTATIM_IND) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"7011", // Northgate Transit Centre
"6348", // ==
"6472", // ><
"6930", // ><
"6484", // ==
"6233", // ==
"6183", // 142 Street & 134 Avenue
"6727" // 159 Street & 131 Avenue Nearside
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"6727", // 159 Street & 131 Avenue Nearside
"6677", // ==
"66139", // !=
"6178", // ==
"6524", // ++
"6472", // ><
"6930", // ><
"7579", // ++
"7011" // Northgate Transit Centre
})) //
.compileBothTripSort());
map2.put(304l, new RouteTripSpec(304l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHPARK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4569",
/* + */"2076",/* + *///
"2218" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2218", "2888",
/* + */"4183",/* + *///
"4569" })) //
.compileBothTripSort());
map2.put(305l, new RouteTripSpec(305l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_GATES) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5668", "5082", "5528", "", "5208", "5214" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5214", "1481", "1861", "5205", "5055", "5335", "5668" })) //
.compileBothTripSort());
map2.put(306l, new RouteTripSpec(306l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MEADOWS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BONNIE_DOON) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"2808", // Bonnie Doon Safeway
"2805", // ++ Girard Road & 76 Avenue
"2415", // !=
"2693", // == 17 Street & Oak Ridge Drive
"2259", // ==
"22189", // !=
"3706" // Meadows Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"3706", // Meadows Transit Centre
"22188", // !=
"2693", // == 17 Street & Oak Ridge Drive
"2259", // ==
"22178", // !=
"2804", // ++
"2159", // ++
"2808", // Bonnie Doon Safeway
})) //
.compileBothTripSort());
map2.put(307l, new RouteTripSpec(307l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, GOLD_BAR, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BONNIE_DOON) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "2196", "2304", "2012",
/* + */"2068"/* + */, //
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] {
/* + */"2068"/* + */, //
"2475", "2305", "2196" //
})) //
.compileBothTripSort());
map2.put(308l, new RouteTripSpec(308l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, RIVERDALE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"1123",
/* + */"1280"/* + */, //
/* + */"1549"/* + */, //
"1893" //
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"1893",
/* + */"1510"/* + */, //
/* + */"1953"/* + */, //
/* + */"1914"/* + */, //
"1254",
/* + */"1498"/* + */, //
/* + */"1120"/* + */, //
"1262", "1123" //
})) //
.compileBothTripSort());
map2.put(309l, new RouteTripSpec(309l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KINGSWAY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RIVERDALE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"1254", // 92 Street & 101A Avenue
"1620", // == 101 Street & Jasper Avenue
"1673", // != 103 Street & Jasper Avenue
"1964", // != 107 Street & Jasper Avenue
"1949", // != 103 Street & 100 Avenue
"1708", // != 105 Street & 100 Avenue
"1941", // == 107 Street & 100 Avenue
"1705", // !=
"1293", // <> 110 Street & 100 Avenue
"1961", // 1=
"1942", // ++
"1960", // ++
"1978", // ++
"1104", // ++
"1366" // 101 Street & 111 Avenue
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"1366", // 101 Street & 111 Avenue
"1455", // Kingsway Mall
"1834", // ++
"1141", // ++
"1856", // !=
"1293", // == <> 110 Street & 100 Avenue
"1711", // != 107 Street & 100 Avenue
"1271", // != 105 Street & Jasper Avenue
"1769", // != 107 Street & 100 Avenue
"1299", // != 103 Street & Jasper Avenue
"1322", // == 103 Street & Jasper Avenue
"1256", // Thornton Court & Jasper Avenue
"1893", // ++
"1254", // 92 Street & 101A Avenue
})) //
.compileBothTripSort());
map2.put(310l, new RouteTripSpec(310l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RIO_TERRACE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5174", "5302", "5383", "5105" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5105", "5491", "5301", "5174" })) //
.compileBothTripSort());
map2.put(311l, new RouteTripSpec(311l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5011", "5222", "5836", "5105" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5105", "5851", "5325", "5011" })) //
.compileBothTripSort());
map2.put(312l, new RouteTripSpec(312l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LONDONDERRY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7008", "7754", "7944" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7944", "7754", "7008" })) //
.compileBothTripSort());
map2.put(313L, new RouteTripSpec(313L, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, PARKALLEN) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"2795", // 112 Street & 65 Avenue nearside
"2689", // ++
"2002", // University Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"2002", // University Transit Centre
"2971", // 117 Street & University Avenue
"2001", // University Transit Centre
"2690", // ++
"2795", // 112 Street & 65 Avenue nearside
})) //
.compileBothTripSort());
map2.put(315L, new RouteTripSpec(315L, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WINTERBURN_IND, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"8609", // Lewis Farms Transit Centre
"8536", // ==
"8087", // !=
"8175", // <> 215 Street & Secord Boulevard
"8146", // <>
"8123", // <>
"8066", // <> 217 Street & 94B Avenue
"8080", // <> Secord Drive & Secord Boulevard
"8061", // <> 218 Street & Secord Blvd
"8078", // !=
"8694", // !=
"8163", // <> 215 St & Westview Blvd
"8955", // <>
"8938", // <>
"8989", // <> Lakeview Drive & Westview Boulevard
"8975", // <> Westview Village & Lakeview Drive
"8144", // !=
"8727", // 220 Street & 115 Avenue
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"8727", // 220 Street & 115 Avenue
"8369", // !=
"8163", // <> 215 St & Westview Blvd
"8955", // <>
"8938", // <>
"8989", // <> Lakeview Drive & Westview Boulevard
"8975", // <> Westview Village & Lakeview Drive
"8945", // !=
"8065", // !=
"8175", // <> 215 Street & Secord Boulevard
"8146", // <>
"8123", // <>
"8066", // <> 217 Street & 94B Avenue
"8080", // <> Secord Drive & Secord Boulevard
"8061", // <> 218 Street & Secord Blvd
"8068", // !=
"8609", // Lewis Farms Transit Centre
})) //
.compileBothTripSort());
map2.put(316l, new RouteTripSpec(316l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HAWKS_RDG, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "8603", "6824", "6408", "6709" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "6709", "6617", "6825", "8603" })) //
.compileBothTripSort());
map2.put(317l, new RouteTripSpec(317l, // TODO better (same stops in both trips in different orders)
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WINTERBURN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"8989", "8967", "8943", "8975", "8927", "8163", "8846", "8975", "8945", //
"8941", "5105" //
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"5105", "8904", //
"8694", "8927", "8163", "8846", "8975", "8927", "8163", "8955", "8938", "8989" //
})) //
.compileBothTripSort());
map2.put(318l, new RouteTripSpec(318l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, COLISEUM) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1208", "1070", "1001", "1491", "1002" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1002", "1340", "1208" })) //
.compileBothTripSort());
map2.put(321l, new RouteTripSpec(321l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA_IND) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "3733", "3744", "2106" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2106",
/* + */"3481"/* + */, //
"3733" })) //
.compileBothTripSort());
map2.put(322l, new RouteTripSpec(322l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HOLYROOD, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BONNIE_DOON) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2808", "2585", "2841",
/* + */"2246"/* + */, //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] {
/* + */"2246"/* + */, //
"2613", "2808" })) //
.compileBothTripSort());
map2.put(323l, new RouteTripSpec(323l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BONNIE_DOON, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, RITCHIE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "2419", "2313", "2808" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2808",
/* + */"2294"/* + */, //
"2419" })) //
.compileBothTripSort());
map2.put(324l, new RouteTripSpec(324l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, AMBLESIDE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "9092", "9630", "4201" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "4201", "9635", "9092" })) //
.compileBothTripSort());
map2.put(325l, new RouteTripSpec(325l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WINDERMERE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "9632", "9526", "4801" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "4801",
/* + */"4938"/* + */, //
"9632" })) //
.compileBothTripSort());
map2.put(327l, new RouteTripSpec(327l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELGRAVIA) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "2765", "2680", "2821" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2821",
/* + */"2648"/* + */, //
"2765" })) //
.compileBothTripSort());
map2.put(330l, new RouteTripSpec(330l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4811", "4597", "4153", "2704", })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2704", "4021", "4494", "4811" })) //
.compileBothTripSort());
map2.put(331l, new RouteTripSpec(331l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CHAPPELLE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "9285",
/* + */"9270"/* + */, //
/* + */"9271"/* + */, //
/* + */"9272"/* + */, //
/* + */"9366"/* + */, //
/* + */"9281"/* + */, //
/* + */"9382"/* + */, //
"4216" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4216",
/* + */"9044"/* + */, //
/* + */"9187"/* + */, //
/* + */"9273"/* + */, //
/* + */"9274"/* + */, //
/* + */"9368"/* + */, //
/* + */"9263"/* + */, //
/* + */"9264"/* + */, //
/* + */"9265"/* + */, //
"9285" })) //
.compileBothTripSort());
map2.put(333l, new RouteTripSpec(333l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ROSENTHAL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "8167",
/* + */"8852"/* + */, //
"8604" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "8604",
/* + */"8168"/* + */, //
"8167" })) //
.compileBothTripSort());
map2.put(334l, new RouteTripSpec(334l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "4809",
/* + */"4626"/* + */, //
"4215" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "4215",
/* + */"4642"/* + */, //
"4809" })) //
.compileBothTripSort());
map2.put(336l, new RouteTripSpec(336l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4810", "4455", "4069", "2208", })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2208", "4167", "4129", "4810" })) //
.compileBothTripSort());
map2.put(337l, new RouteTripSpec(337l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "4802", "4117", "4110", "4215", })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "4215", "4941", "4856", "4802" })) //
.compileBothTripSort());
map2.put(338l, new RouteTripSpec(338l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BLACKBURNE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "9226", "4201", "4813", "4597", "4034", "2207", })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2207", "4034", "4042", "4805", "4204", "9226" })) //
.compileBothTripSort());
map2.put(339l, new RouteTripSpec(339l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RUTHERFORD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "9251", "9685", "4213", })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4213", "9756", "9251" })) //
.compileBothTripSort());
map2.put(340l, new RouteTripSpec(340l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"3217", // Mill Woods Transit Centre
"3122", // == Hewes Way & 27 Avenue
"3244", // != Youville Drive W & 28 Avenue
"3338", // != 65 Street & 28 Avenue
"3462", // != Youville Drive W & 28 Avenue
"3498", // != 66 Street & 31 Avenue
"3264", // == 67 Street & 28 Avenue
"3482", // ++
"2102", // Millgate Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"2102", // Millgate Transit Centre
"3448", // ++
"3217", // Mill Woods Transit Centre
})) //
.compileBothTripSort());
map2.put(347l, new RouteTripSpec(347l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ALLARD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "9717", "9685", "4213", })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4213", "9666", "9717" })) //
.compileBothTripSort());
map2.put(360l, new RouteTripSpec(360l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ORCHARDS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "9306",
/* + */"9050"/* + */, //
"4216", })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4216",
/* + */"9051"/* + */, //
"9306" })) //
.compileBothTripSort());
map2.put(361l, new RouteTripSpec(361l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3210", "3585", "2105", })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2105", "3529", "3210" })) //
.compileBothTripSort());
map2.put(362l, new RouteTripSpec(362l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHWOOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3082",
/* + */"3149"/* + */, //
"3211", })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "3211", "3009", "3082" })) //
.compileBothTripSort());
map2.put(363l, new RouteTripSpec(363l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHWOOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3066", "3003", "3215", })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "3215",
/* + */"3174"/* + */, //
"3066" })) //
.compileBothTripSort());
map2.put(370l, new RouteTripSpec(370l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3206", "3957", "3796", "2106", })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2106", "3748", "3950", "3206" })) //
.compileBothTripSort());
map2.put(380l, new RouteTripSpec(380l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, QUARRY_RDG, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7903",
/* + */"7587"/* + */, //
"7213" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7213",
/* + */"77430"/* + */, //
"7903" })) //
.compileBothTripSort());
map2.put(381l, new RouteTripSpec(381l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HOLLICK_KENYON, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "7905",
/* + */"7982"/* + */, //
"7151", })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7151",
/* + */"7808"/* + */, //
"7905" })) //
.compileBothTripSort());
map2.put(399l, new RouteTripSpec(399l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, COLISEUM, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CONCORDIA) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "1517",
/* + */"1015"/* + */, //
"1209" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1209",
/* + */"1131"/* + */, //
"1517" })) //
.compileBothTripSort());
map2.put(512l, new RouteTripSpec(512l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "1336", "1408", "1211", "7212", "7903" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7903", "7212", "1210", "1407", "1620" })) //
.compileBothTripSort());
map2.put(517l, new RouteTripSpec(517l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW_WEST_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, COLISEUM) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"1211", "7903" //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"7903", "1211" //
})) //
.compileBothTripSort());
map2.put(560l, new RouteTripSpec(560l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SPRUCE_GRV) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"5389", // 154 Street & 118 Avenue
"8730", // == Century Road & Grove Drive
"8743", // == Aspenglen Drive & Grove Drive
"8737", // == King Street & McLeod Avenue
"8785", // == Century Road & McLeod Avenue
"8761", // == Century Road & Grove Drive
"1890", // 109 Street & Princess Elizabeth Avenue
"1983", // 105 Street & 104 Avenue
"1479", // 97 Street & 103A Avenue
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"1570", // 101 Street & 103A Avenue
"1679", // 105 Street & 104 Avenue
"1860", // 109 Street & Princess Elizabeth Avenue
"8730", // == Century Road & Grove Drive
"8743", // == Aspenglen Drive & Grove Drive
"8737", // == King Street & McLeod Avenue
"8785", // == Century Road & McLeod Avenue
"8761", // == Century Road & Grove Drive
"5415", // 154 Street & 119 Avenue
})) //
.compileBothTripSort());
map2.put(561l, new RouteTripSpec(561l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NAIT, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Acheson") //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"8169", "1890" //
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"1860", "8169" //
})) //
.compileBothTripSort());
map2.put(562l, new RouteTripSpec(562l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, // WEST_EDM_MALL
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SPRUCE_GRV) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"8207", // Jennifer Heil Way & Grove Drive
"5219", // 175 Street & 87 Avenue
"2708" // South Campus Transit Centre Fort Edmonton Park
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"2708", // South Campus Transit Centre Fort
"5014", // West Edmonton Mall Transit Centre
"8207", // Jennifer Heil Way & Grove Drive
})) //
.compileBothTripSort());
map2.put(577l, new RouteTripSpec(577l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHLANDS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, _84_ST_111_AVE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1408",
/* + */"1094"/* + */, //
"1371" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1371",
/* + */"1180"/* + */, //
"1408" })) //
.compileBothTripSort());
map2.put(580l, new RouteTripSpec(580l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, FT_SASKATCHEWAN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW_WEST_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"7908", // West Clareview Transit Centre
"77162", // Southfort Drive & South Point Shopping Fort Sask
"7405", // Dow Centennial Centre Fort Sask
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"7405", // Dow Centennial Centre Fort Sask
"7926", // 95 Street & 96 Avenue Fort Sask
"7908" // West Clareview Transit Centre
})) //
.compileBothTripSort());
map2.put(589l, new RouteTripSpec(589l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EDM_WASTE_MGT_CTR, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, COLISEUM) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1211", "7700",
/* + */"7701"/* + */, //
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] {
/* + */"7700"/* + */, //
"7701", "1211" })) //
.compileBothTripSort());
map2.put(591l, new RouteTripSpec(591l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHLANDS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2307", "2359", "1371" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1371", "2594", "2307" })) //
.compileBothTripSort());
map2.put(594l, new RouteTripSpec(594l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Valley Zoo", //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"5219", // 175 Street & 87 Avenue
"5332", // 152 Street & 87 Avenue
"5095", // 133 Street & Buena Vista Road
"5015" // Valley Zoo Parking Lot
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"5015", // Valley Zoo Parking Lot
"5095", // 133 Street & Buena Vista Road
"5610", // 155 Street & 87 Avenue
"5219" // 175 Street & 87 Avenue
})) //
.compileBothTripSort());
map2.put(595l, new RouteTripSpec(595l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, FT_EDM) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"4476", // Fort Edmonton
"2978", // ++
"2706" // South Campus Transit Centre Fort Edmonton Park
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"2706",// South Campus Transit Centre Fort Edmonton Park
"22160", // ++
"4476" // Fort Edmonton
})) //
.compileBothTripSort());
map2.put(596l, new RouteTripSpec(596l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, VLY_ZOO_FT_EDM) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5015", "4476", "2706" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2706", "4476", "5015" })) //
.compileBothTripSort());
map2.put(599l, new RouteTripSpec(599l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EDM_GARRISON, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"6316", // Eaux Claires Transit Centre
"7991", // 97 Street & 176 Avenue
"7873", // C Ortona Road & Churchill Avenue Garrison
"7681", // Ortona Road & Ubique Avenue Garrison
"7412", // Korea Road & Ortona Road Garrison
"7895" // B Hindenburg Line Road & Churchill Avenue Garrison
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"7895", // B Hindenburg Line Road & Churchill Avenue Garrison
"7406", // Highway 28A & Mons Avenue Garrison
"7873", // C Ortona Road & Churchill Avenue Garrison
"7681", // Ortona Road & Ubique Avenue Garrison
"6854", // 97 Street & 176 Avenue
"6316" // Eaux Claires Transit Centre
})) //
.compileBothTripSort());
map2.put(601l, new RouteTripSpec(601l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ROSS_SHEPPARD, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEDGEWOOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5627", "5908", "5983", "5548", "5392" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(602l, new RouteTripSpec(602l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ROSS_SHEPPARD, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ORMSBY_PL) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5755", "5828", "5725", "5874", "5548", "5392" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(606l, new RouteTripSpec(606l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CARLTON, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "6603", "6853", "6293", "6369", "5211", "5548" })) //
.compileBothTripSort());
map2.put(607l, new RouteTripSpec(607l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"6822", "6293", "6369", "5211", "5548" //
})) //
.compileBothTripSort());
map2.put(608l, new RouteTripSpec(608l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BEAUMARIS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"6593", "6027", //
"6369", // ==
"6372", "1664", // !=
"5173", // !=
"5090", // ==
"5211", "5548" //
})) //
.compileBothTripSort());
map2.put(609l, new RouteTripSpec(609l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BATURYN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"6593", "6053", //
"6369", // ==
"6372", "1664", // !=
"6289", "5173", // !=
"5356", // ==
"5211", "5548" //
})) //
.compileBothTripSort());
map2.put(610l, new RouteTripSpec(610l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DUNLUCE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "6131", "6177", "5211", "5548" })) //
.compileBothTripSort());
map2.put(612l, new RouteTripSpec(612l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, OXFORD, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "6410", "6695", "5211", "5548" })) //
.compileBothTripSort());
map2.put(613l, new RouteTripSpec(613l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RUTHERFORD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"9356", // 125 Street & 20 Avenue SW
"4213" // Century Park Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"4213", // Century Park Transit Centre
"9356" // 125 Street & 20 Avenue SW
})) //
.compileBothTripSort());
map2.put(617l, new RouteTripSpec(617l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KLARVATTEN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CARDINAL_LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7827", "7795", "7659" })) //
.compileBothTripSort());
map2.put(618l, new RouteTripSpec(618l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MATT_BERRY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JJ_BOWLEN) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7992", "7058", "7449", "7545" })) //
.compileBothTripSort());
map2.put(620l, new RouteTripSpec(620l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELVEDERE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, AOB) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7210", "1207", "2915" })) //
.compileBothTripSort());
map2.put(621l, new RouteTripSpec(621l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, AOB) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1002", "2553" })) //
.compileBothTripSort());
map2.put(635l, new RouteTripSpec(635l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5210", "1481", "1242", "1083", "1393" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(689l, new RouteTripSpec(689l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WINDSOR_PARK, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2851", "2974" })) //
.compileBothTripSort());
map2.put(697l, new RouteTripSpec(697l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"4810", // Leger Transit Centre
"4455", // Falconer Road & Riverbend Square
"4158", // Whitemud Drive SB & 53 Avenue
"2703", // South Campus Transit Centre Fort Edmonton Park
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
/* no stops *///
})) //
.compileBothTripSort());
map2.put(698l, new RouteTripSpec(698l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_PHERSON, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JACKSON_HTS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "3230", "3964" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(699l, new RouteTripSpec(699l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_PHERSON, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JACKSON_HTS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "3355", "3400", "3603" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(701l, new RouteTripSpec(701l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELMEAD) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5914", "5001" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(702l, new RouteTripSpec(702l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, PARKVIEW, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ORMSBY_PL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5881", "5828", "5725", "5198" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(703l, new RouteTripSpec(703l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CRESTWOOD, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ST_FRANCIS_XAVIER) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5421", "5038", "5174", "5941" })) //
.compileBothTripSort());
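		// In the hand-ordered stop lists below, the "==" / "!=" markers appear to flag stops
		// shared by all trip variants ("==") versus stops served by only one variant ("!="),
		// and "??" seems to mark uncertain entries; they are plain comments with no effect at runtime.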
map2.put(705l, new RouteTripSpec(705l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTLAWN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"8602", // Lewis Farms Transit Centre
"5001", // West Edmonton Mall Transit Centre
"5029", // == 163 Street & 88 Avenue
"5577", // ?? 163 Street & 92 Avenue
"5991",// ?? 163 Street & 92 Avenue
"5522", // == 163 Street & 92 Avenue
"5069" // 165 Street & 95 Avenue
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(706l, new RouteTripSpec(706l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Jasper Pl TC", // _157_ST_100A_AVE
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE) // High School
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5106", "5177" })) //
.compileBothTripSort());
map2.put(707l, new RouteTripSpec(707l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, OSCAR_ROMERO, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, THE_HAMPTONS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "8670", "8135", "5986" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(708l, new RouteTripSpec(708l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, // not TC
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5012", "5874", "5221", "5109" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(709l, new RouteTripSpec(709l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MEADOWLARK) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5359", "5437", "1256" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(710l, new RouteTripSpec(710l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ROSS_SHEPPARD, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5001", "5174", "5588", "5392" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(711l, new RouteTripSpec(711l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ROSS_SHEPPARD, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "8603", "5013", "5929", "5433", "5180", "5896" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(712l, new RouteTripSpec(712l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, HILLCREST, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ORMSBY_PL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5755", "5828", "5894" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(717l, new RouteTripSpec(717l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, VICTORIA, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5001", "1426" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(723l, new RouteTripSpec(723l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HADDOW) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4941", "4319", "4815", "4069", "2974" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(725l, new RouteTripSpec(725l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KINGSWAY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "" })) // NO STOPS
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1111", "1857", "1939", "2002" })) //
.compileBothTripSort());
map2.put(726l, new RouteTripSpec(726l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ROSS_SHEPPARD, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4808", "4249", "5511", "5180", "5896" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(728l, new RouteTripSpec(728l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BROOKSIDE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "4034", "4029", "2710", "2974" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(729l, new RouteTripSpec(729l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "4815", "4246", "2974" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(730l, new RouteTripSpec(730l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LAGO_LINDO, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7377", "6317", "7016", "5548" })) //
.compileBothTripSort());
map2.put(731l, new RouteTripSpec(731l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, KINGSWAY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"5204", // Westmount Transit Centre
"1105" // Kingsway RAH Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"1105", // Kingsway RAH Transit Centre
"5204" // Westmount Transit Centre
})) //
.compileBothTripSort());
map2.put(733l, new RouteTripSpec(733l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5001", "2714", "2002" })) //
.compileBothTripSort());
map2.put(734l, new RouteTripSpec(734l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LAGO_LINDO, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MARY_BUTTERWORTH) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7377", "7483", "6236" })) //
.compileBothTripSort());
map2.put(735l, new RouteTripSpec(735l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5006", "5156", "2714", "2002" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(738l, new RouteTripSpec(738l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4815", "4158", "2709" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(741l, new RouteTripSpec(741l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WP_WAGNER, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KNOTTWOOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3023", "3001", "2111", "2189" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(744l, new RouteTripSpec(744l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MAYLIEWAN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, QUEEN_ELIZABETH) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7547", "7441", "7925", "7060" })) //
.compileBothTripSort());
map2.put(739l, new RouteTripSpec(739l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LENDRUM) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"2745", // 109 Street & 65 Avenue
"2002", // University Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
				/* no stops */ //
})) //
.compileBothTripSort());
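		// Route 747 (Century Park - Edmonton International Airport) is the only spec in this
		// block that chains addBothFromTo() after compileBothTripSort(), apparently to assign a
		// fixed direction to trips that start and end at the same terminal stop ("9747" / "4216").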
map2.put(747l, new RouteTripSpec(747l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EDM_INT_AIRPORT) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "9747", "4216" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4216", "9747" })) //
.compileBothTripSort() //
.addBothFromTo(MDirectionType.SOUTH.intValue(), "4216", "4216") //
.addBothFromTo(MDirectionType.NORTH.intValue(), "9747", "9747")); //
map2.put(748l, new RouteTripSpec(748l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LAGO_LINDO, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CARDINAL_LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7377", "6309", "7353" })) //
.compileBothTripSort());
map2.put(750l, new RouteTripSpec(750l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EASTGLEN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7015", "7165", "1203", "1033" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(753l, new RouteTripSpec(753l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LONDONDERRY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7572", "7007" })) //
.compileBothTripSort());
map2.put(755l, new RouteTripSpec(755l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, OXFORD, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_O_LEARY) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "6452", "6695", "6628", "6442", "7358", "7165" })) //
.compileBothTripSort());
map2.put(756l, new RouteTripSpec(756l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LA_ZERTE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "6001", "6340", "6310", "7186" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(757l, new RouteTripSpec(757l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, _127_ST_129_AVE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
				/* no stops */ //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"6369", // 127 Street & 129 Avenue
"1965", // 127 Street & 122 Avenue
"5201", // Westmount Transit Centre
"2515", // ++
"2002", // University Transit Centre
})) //
.compileBothTripSort());
map2.put(760l, new RouteTripSpec(760l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_NALLY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LARKSPUR) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3247", "3586", "2676" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(761l, new RouteTripSpec(761l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2285", "2974" })) //
.compileBothTripSort());
map2.put(762l, new RouteTripSpec(762l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_NALLY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, AVONMORE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2339", "2447", "2544", "2267", "2676" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(763l, new RouteTripSpec(763l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BONNIE_DOON, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2159", "2891", "2001" })) //
.compileBothTripSort());
map2.put(764l, new RouteTripSpec(764l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2301", "2267", "1620" })) //
.compileBothTripSort());
map2.put(765l, new RouteTripSpec(765l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, RHATIGAN_RIDGE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "4461", "4249", "2974" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(767l, new RouteTripSpec(767l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3217", "3011", "2111", "2974" })) //
.compileBothTripSort());
map2.put(768l, new RouteTripSpec(768l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WP_WAGNER) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3280", "3556", "3212", "3007", "2111", "2189" })) //
.compileBothTripSort());
map2.put(769l, new RouteTripSpec(769l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WP_WAGNER, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3211", "3585", "2111", "2189" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(771l, new RouteTripSpec(771l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WP_WAGNER, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CRAWFORD_PLAINS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3143", "3217", "3002", "2111", "2189" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(773l, new RouteTripSpec(773l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WP_WAGNER, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3217", "3585", "2111", "2189" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(774l, new RouteTripSpec(774l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HARDISTY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SILVERBERRY) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3255", "3708", "3740", "3491", "2915", "2177" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(776l, new RouteTripSpec(776l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_NALLY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3217", "3796", "3586", "2676" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(777l, new RouteTripSpec(777l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MEADOWS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3703", "3560", "3217" })) //
.compileBothTripSort());
map2.put(778l, new RouteTripSpec(778l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_NALLY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3217", "3255", "3491", "2676" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(779l, new RouteTripSpec(779l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HARDISTY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3217", "3255", "3491", "2915", "2177" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(780l, new RouteTripSpec(780l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HARDISTY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3217", "3796", "3586", "2915", "2177" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(781l, new RouteTripSpec(781l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_NALLY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2105", "2551", "2676" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(782l, new RouteTripSpec(782l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KENILWORTH, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3211", "3585", "2111", "2255", "2487", "2160" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(783l, new RouteTripSpec(783l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KENILWORTH, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, GREENVIEW) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3328", "3537", "2160" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(784l, new RouteTripSpec(784l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_NALLY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MEADOWS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3255", "3708", "3740", "3491", "2676" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(785l, new RouteTripSpec(785l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HARDISTY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WILDROSE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3247", "3491", "2915", "2177" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(786l, new RouteTripSpec(786l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, AVALON, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2202", "2518" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(787l, new RouteTripSpec(787l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "2212", "2778", "2974" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(789l, new RouteTripSpec(789l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WP_WAGNER, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHWOOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3143", "3217", "2189" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(790l, new RouteTripSpec(790l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BEARSPAW) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4205", "4290", "4203", "4157", "4431", "2218" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(791l, new RouteTripSpec(791l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RUTHERFORD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "9242", "9685", "4216", "2218" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(793l, new RouteTripSpec(793l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, HARRY_AINLAY) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3217", "3008", "4490" })) //
.compileBothTripSort());
map2.put(795l, new RouteTripSpec(795l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, TWIN_BROOKS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4265", "4216", "2218" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(796l, new RouteTripSpec(796l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EASTGLEN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, FRASER) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7470", "7620", "1185" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(799l, new RouteTripSpec(799l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RIVERBEND, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4808", "4489", "4069", "4246", "4029" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(800l, new RouteTripSpec(800l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MATT_BERRY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, QUEEN_ELIZABETH) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7383", "7288", "7298", "7140" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(802l, new RouteTripSpec(802l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE) // not TC
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5991", "5061", "5101", "5202" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5209", "5150", "5101" })) //
.compileBothTripSort());
map2.put(803l, new RouteTripSpec(803l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ORMSBY_PL, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BRUCE_SMITH) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5623", "5755", "5725" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(804l, new RouteTripSpec(804l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LA_PERLE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5548", "5012", "5024" })) //
.compileBothTripSort());
map2.put(805l, new RouteTripSpec(805l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL_TC) // WEDGEWOOD
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5061", "5069", "5002" })) //
.compileBothTripSort());
map2.put(806l, new RouteTripSpec(806l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"5181", "5107", "5207", //
"5549", // ==
"1759", // !=
"1867", "1735", // !=
"6122", // ==
"6333", "7011" //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(807l, new RouteTripSpec(807l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BERIAULT, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5894", "5001" })) //
.compileBothTripSort());
map2.put(808l, new RouteTripSpec(808l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, // not TC
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Jasper Place (not TC)") //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5577", "5111" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(809l, new RouteTripSpec(809l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, HILLCREST, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5713", "5005" })) //
.compileBothTripSort());
map2.put(810l, new RouteTripSpec(810l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ST_ROSE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5611", "5001" })) //
.compileBothTripSort());
map2.put(811l, new RouteTripSpec(811l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, PARKVIEW, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5198", "5294", "5069", "5903", "5013" })) //
.compileBothTripSort());
map2.put(812l, new RouteTripSpec(812l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, PARKVIEW, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LA_PERLE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5656", "5011", "5024" })) //
.compileBothTripSort());
map2.put(814l, new RouteTripSpec(814l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ROSS_SHEPPARD, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5392", "5527", "5140", "5007" })) //
.compileBothTripSort());
map2.put(815l, new RouteTripSpec(815l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5548", "5005" })) //
.compileBothTripSort());
map2.put(817l, new RouteTripSpec(817l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BERIAULT, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5894", "5012" })) //
.compileBothTripSort());
map2.put(818l, new RouteTripSpec(818l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL_TC, // BERIAULT
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ORMSBY_PL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5718", "5725", "5004" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5894", "5004", "5755", "5828", "5718" })) //
.compileBothTripSort());
map2.put(819l, new RouteTripSpec(819l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5203", "5102", "5007" })) //
.compileBothTripSort());
map2.put(820l, new RouteTripSpec(820l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LY_CAIRNS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
						Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2812", "2218" })) //
.compileBothTripSort());
map2.put(821l, new RouteTripSpec(821l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CRESTWOOD, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
						Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5096", "5225", "5005" })) //
.compileBothTripSort());
map2.put(822l, new RouteTripSpec(822l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "1591", "1108", "1476", "7001" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1532", "1104", "1426",
/* + */"1050"/* + */, //
"1142" })) //
.compileBothTripSort());
map2.put(824l, new RouteTripSpec(824l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, VICTORIA, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1911", "5001", "8605" })) //
.compileBothTripSort());
map2.put(825l, new RouteTripSpec(825l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, VICTORIA) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "1554", "1237", "7002" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(826l, new RouteTripSpec(826l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MAGRATH) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2974", "4815", "4306", "4506" })) //
.compileBothTripSort());
map2.put(828l, new RouteTripSpec(828l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BROOKSIDE) // Ramsey Heights
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2974", "2707", "4021", "4034" })) //
.compileBothTripSort());
map2.put(829l, new RouteTripSpec(829l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2974", "4815" })) //
.compileBothTripSort());
map2.put(830l, new RouteTripSpec(830l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_NALLY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2547", "1142" })) //
.compileBothTripSort());
map2.put(832l, new RouteTripSpec(832l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5180", "6725", "6011" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(835l, new RouteTripSpec(835l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, AMISKWACIY) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"1979", //
"1669", // !=
"1974", "1735", // ==
"1799", "1759", // ==
"6122", // !=
"6333", "6579", "7003" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(836l, new RouteTripSpec(836l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KINGSWAY) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"1109", //
"1896", // ==
"-11329", // !=
"1821", "1669", "1974", // !=
"6122", // ==
"6328", "6252", "7003" //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(837l, new RouteTripSpec(837l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, COLISEUM, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5204", "1814", "1814", "1110", "1205" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(839l, new RouteTripSpec(839l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5548", "5132", "5038", "5013" })) //
.compileBothTripSort());
map2.put(840l, new RouteTripSpec(840l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"5206", //
"1725", // ==
"1759", // !=
"1867", "1735", // !=
"6122", // ==
"6333", "6002", "6047", "6001" //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(841l, new RouteTripSpec(841l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "6285", "6317", "7003" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7011", "6314", "6009" })) //
.compileBothTripSort());
map2.put(842l, new RouteTripSpec(842l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_O_LEARY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, OXFORD) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7120", "7496", "7060", "6348", "6243", "6337" })) //
.compileBothTripSort());
map2.put(843l, new RouteTripSpec(843l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5442", "5445", "1881", "1322" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(844l, new RouteTripSpec(844l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, FRASER, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW_EAST_TC) // QUEEN_ELIZABETH
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"7358", // 95 Street & 132 Avenue #QueenElizabeth
"7286", // 82 Street & 132 Avenue
"7330", // ==
"7206", // Belvedere Transit Centre
"7210", // Belvedere Transit Centre
"7335", // ==
"7104", // East Clareview Transit Centre
"7470", // 26 Street & 151 Avenue
"7437" // 21 Street & 147 Avenue #Fraser
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"7437", // 21 Street & 147 Avenue #Fraser
"7470", // 26 Street & 151 Avenue
"7105" // East Clareview Transit Centre
})) //
.compileBothTripSort());
map2.put(845l, new RouteTripSpec(845l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7120", "7496", "7060", "7007", "7186", "7106" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7106", "7572", "7185", "7007" })) //
.compileBothTripSort());
map2.put(846l, new RouteTripSpec(846l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BATURYN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5180", "6091", "6028", "6294" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(848l, new RouteTripSpec(848l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LAGO_LINDO, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CARDINAL_LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "7659", "6315", "7377", "7483" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(849l, new RouteTripSpec(849l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW_EAST_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, QUEEN_ELIZABETH) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7358", "7209", "7823", "7943", "7269", "7101" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(851l, new RouteTripSpec(851l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KLARVATTEN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CARDINAL_LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "7728", "7827", "7434" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(852l, new RouteTripSpec(852l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_O_LEARY) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "7496", "6130", "6522", "6011", "6127" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(853l, new RouteTripSpec(853l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELVEDERE_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7585", "7204" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7496", "7008" })) //
.compileBothTripSort());
map2.put(855l, new RouteTripSpec(855l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_O_LEARY) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "7496", "6301", "6039", "6447" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(856l, new RouteTripSpec(856l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JH_PICARD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2779", "2824", "1729" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(858l, new RouteTripSpec(858l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STADIUM_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, AMISKWACIY) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1979", "1110", "1401" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(859l, new RouteTripSpec(859l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"5055", //
"5548", "5207", //
"5549", // ==
"1759", // !=
"1867", "1735", // !=
"6122", // ==
"6333", "7011" //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(860l, new RouteTripSpec(860l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_NALLY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2676", "3230", "3217" })) //
.compileBothTripSort());
map2.put(861l, new RouteTripSpec(861l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_NALLY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2676", "3230", "3247", "3446" })) //
.compileBothTripSort());
map2.put(862l, new RouteTripSpec(862l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, OTTEWELL, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS) // BURNEWOOD
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
				/* no stops */ //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"2487", // 73 Street & 94B Avenue
"2064", // !=
"2360", // ==
"2426", // ==
"2915", // != 61 Street & 95 Avenue
"2360", // ==
"2426", // ==
"2434", // !=
"3230", // 49 Street & 44 Avenue
"3704", // Meadows Transit Centre
"3185", // 34 Street & 35A Avenue
})) //
.compileBothTripSort());
map2.put(864l, new RouteTripSpec(864l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_NALLY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2676", "2196", "2393", "2188", "2385", "2103" })) //
.compileBothTripSort());
map2.put(865l, new RouteTripSpec(865l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, TD_BAKER, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3150", "3212" })) //
.compileBothTripSort());
map2.put(866l, new RouteTripSpec(866l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ST_KEVIN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "2439", "2307" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(867l, new RouteTripSpec(867l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LAKEWOOD) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "3002", "3217" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(869l, new RouteTripSpec(869l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, OTTEWELL_AOB, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
				/* no stops */ //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"2487", // 73 Street & 94B Avenue
"2064", // !=
"2360", // ==
"2426", // ==
"2915", // != 61 Street & 95 Avenue
"2360", // ==
"2426", // ==
"2434", // !=
"3355", // 50 Street & Jamha Road
"3411", // 23 Street & 37A Avenue
"3217", // Mill Woods Transit Centre
})) //
.compileBothTripSort());
map2.put(870l, new RouteTripSpec(870l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHWOOD, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LAKEWOOD) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "3002", "3204", "3142" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(871l, new RouteTripSpec(871l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELLE_RIVE, // LAGO_LINDO
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MARY_BUTTERWORTH) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "6285", "7377", "7780", "7430" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(872l, new RouteTripSpec(872l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WP_WAGNER, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2418", "2103", "3003", "3214" })) //
.compileBothTripSort());
map2.put(873l, new RouteTripSpec(873l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KENILWORTH, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WOODVALE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2160", "3461" })) //
.compileBothTripSort());
map2.put(874l, new RouteTripSpec(874l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WP_WAGNER, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2189", "3204", "3142" })) //
.compileBothTripSort());
map2.put(875l, new RouteTripSpec(875l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WP_WAGNER, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2418", "3529", "3211" })) //
.compileBothTripSort());
map2.put(876l, new RouteTripSpec(876l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WP_WAGNER, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2189", "3203", "3356" })) //
.compileBothTripSort());
map2.put(877l, new RouteTripSpec(877l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JH_PICARD, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2080", "2640", "2245", "3004", "3201" })) //
.compileBothTripSort());
map2.put(878l, new RouteTripSpec(878l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HARDISTY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2177", "3355", "3217" })) //
.compileBothTripSort());
map2.put(879l, new RouteTripSpec(879l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, OTTEWELL, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE) // Mill Woods?
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2487", "2188", "2526", "2103" })) //
.compileBothTripSort());
map2.put(880l, new RouteTripSpec(880l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KENILWORTH, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2160", "2188", "2105", "3529", "3211" })) //
.compileBothTripSort());
map2.put(881l, new RouteTripSpec(881l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_NALLY) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "2151", "2301" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(882l, new RouteTripSpec(882l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KENILWORTH, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2160", "2188", "2526", "2103", "3003", "3214" })) //
.compileBothTripSort());
map2.put(883l, new RouteTripSpec(883l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, VERNON_BARFORD, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, TWIN_BROOKS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"4340", // 119 Street & Fairway Drive
"4238", // 119 Street & Fairway Drive
"4265", // Twin Brooks Drive & 12 Avenue
"4248", // Running Creek Road & 12 Avenue
})) //
.compileBothTripSort());
map2.put(884l, new RouteTripSpec(884l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WP_WAGNER, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LAKEWOOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2418", "3008", "3023", "3008" })) //
.compileBothTripSort());
map2.put(885l, new RouteTripSpec(885l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, VERNON_BARFORD, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4270", "4238", "4214" })) //
.compileBothTripSort());
map2.put(886l, new RouteTripSpec(886l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, AVALON, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2009", "2207" })) //
.compileBothTripSort());
map2.put(887l, new RouteTripSpec(887l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HARRY_AINLAY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, TWIN_BROOKS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4143", "4204", "4265", "4248" })) //
.compileBothTripSort());
map2.put(888l, new RouteTripSpec(888l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, VERNON_BARFORD) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "4270", "4238", "4205" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(889l, new RouteTripSpec(889l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2206", "4490", "4143", "4198", "4205", //
"4290", "4203" })) //
.compileBothTripSort());
map2.put(890l, new RouteTripSpec(890l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2974", "2201" })) //
.compileBothTripSort());
map2.put(892l, new RouteTripSpec(892l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HARRY_AINLAY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4490", "4486", "4208" })) //
.compileBothTripSort());
map2.put(893l, new RouteTripSpec(893l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, HARRY_AINLAY) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "4490", "3004", "3217" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(894l, new RouteTripSpec(894l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA) // Allendale
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "2741", "2974", "2102" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(895l, new RouteTripSpec(895l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2821", "2664", "2212" })) //
.compileBothTripSort());
map2.put(896l, new RouteTripSpec(896l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RIVERBEND, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4021", "4803" })) //
.compileBothTripSort());
map2.put(897l, new RouteTripSpec(897l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LAKEWOOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3002", "3214", "3740", "2110" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(898l, new RouteTripSpec(898l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2974", "2102", "3001", "3217" })) //
.compileBothTripSort());
map2.put(899l, new RouteTripSpec(899l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5991", "5061", "5069", "5903", "5012" })) //
.compileBothTripSort());
map2.put(901l, new RouteTripSpec(901l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LONDONDERRY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, _142_ST_109_AVE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5055", "5548", "7011", "6304", "7456" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(902l, new RouteTripSpec(902l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, PARKVIEW, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ORMSBY_PL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5656", "5611", "5755", "5828", "5725" })) //
.compileBothTripSort());
map2.put(903l, new RouteTripSpec(903l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, QUEEN_ELIZABETH) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7383", "7260", "7909" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(907l, new RouteTripSpec(907l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HADDOW) // Rhatigan Rdg
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2974", "4021", "4016" })) //
.compileBothTripSort());
map2.put(908l, new RouteTripSpec(908l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, FRASER, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EASTGLEN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1033", "7237" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(909l, new RouteTripSpec(909l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EASTGLEN) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "1185", "7120", "7009", "6315" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(913l, new RouteTripSpec(913l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5211", "7011", "6313" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(914l, new RouteTripSpec(914l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, OXFORD, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5207", "6328", "6337" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(916l, new RouteTripSpec(916l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BATURYN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"5206", //
"1725", // ==
"1759", // !=
"1867", "1735", // !=
"6122", // ==
"6002"//
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(917l, new RouteTripSpec(917l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, FR_TROY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JACKSON_HTS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3411", "3267" })) //
.compileBothTripSort());
map2.put(918l, new RouteTripSpec(918l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, FR_TROY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JACKSON_HTS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3964", "3420" })) //
.compileBothTripSort());
map2.put(919l, new RouteTripSpec(919l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EASTGLEN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1033", "1521", "1001" })) //
.compileBothTripSort());
map2.put(920l, new RouteTripSpec(920l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MINCHAU, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HOLY_FAMILY) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3153", "3363" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(921l, new RouteTripSpec(921l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_NALLY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SILVERBERRY) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2676", "3230", "3419" })) //
.compileBothTripSort());
map2.put(922l, new RouteTripSpec(922l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5548", "4579", "4806" })) //
.compileBothTripSort());
map2.put(923l, new RouteTripSpec(923l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, TWIN_BROOKS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2204", "4490", "4204", "4265", "4248" })) //
.compileBothTripSort());
map2.put(924l, new RouteTripSpec(924l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DAN_KNOTT) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "3572", "3006", "3208" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(925l, new RouteTripSpec(925l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WINDSOR_PARK) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2974", "2844" })) //
.compileBothTripSort());
map2.put(926l, new RouteTripSpec(926l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, OTTEWELL, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"2050", // 61 Street & 94B Avenue
"2287", // Ottewell Road & 94 Avenue
"2752", // == 112 Street & 82 Avenue
"2982", // != 114 Street & 83 Avenue
"22354", // != 114 Street & 83 Avenue
"2638", // == 114 Street & 85 Avenue
"2001", // University Transit Centre
"2702", // South Campus Transit Centre Fort Edmonton Park
"5296", // ++
"5006", // West Edmonton Mall Transit Centre
})) //
.compileBothTripSort());
map2.put(931l, new RouteTripSpec(931l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LA_ZERTE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LAGO_LINDO) // KLARVATTEN
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7384", "7483" })) //
.compileBothTripSort());
map2.put(932l, new RouteTripSpec(932l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW_WEST_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LA_ZERTE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7384", "7241", "7604", "7901" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(934l, new RouteTripSpec(934l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LA_ZERTE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7572", "6311", "6008" })) //
.compileBothTripSort());
map2.put(935l, new RouteTripSpec(935l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HOLLICK_KENYON, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_LEOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "7160", "7535", "7298", "7140" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(939l, new RouteTripSpec(939l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ELSINORE, // CHAMBERY
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MARY_BUTTERWORTH) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "6285", "6166", "6674" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(940l, new RouteTripSpec(940l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, AMISKWACIY) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1979", "1476", "1201", "1001" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(941l, new RouteTripSpec(941l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, AOB) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"2915", //
"1086", // ==
"1001", // !=
"1003" // !=
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(943l, new RouteTripSpec(943l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELVEDERE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, AOB) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2915", "1206", "7210" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(945l, new RouteTripSpec(945l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, _88_ST_132_AVE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "7496", "6315" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(948l, new RouteTripSpec(948l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, HILLCREST, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LA_PERLE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5713", "5011", "5024" })) //
.compileBothTripSort());
map2.put(949l, new RouteTripSpec(949l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, HILLCREST, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LA_PERLE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5713", "5012", "5024" })) //
.compileBothTripSort());
map2.put(950l, new RouteTripSpec(950l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BERIAULT, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LYMBURN_ORMSBY_PL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] {/* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5894", "5419", "5725" })) //
.compileBothTripSort());
map2.put(952l, new RouteTripSpec(952l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CRESTWOOD, // RIO_TERRACE
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ST_FRANCIS_XAVIER) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"5071", "5174", "5433", //
"5588", // ==
"5198", // !=
"5043", // !=
"5120" // !=
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(953l, new RouteTripSpec(953l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LYMBURN) // ORMSBY_PL
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5355", "5004", "5755", "5828", "5725" })) //
.compileBothTripSort());
map2.put(954l, new RouteTripSpec(954l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEDGEWOOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5687", "5002", "5979", "5968" })) //
.compileBothTripSort());
map2.put(955l, new RouteTripSpec(955l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LA_PERLE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5355", "5011", "5024" })) //
.compileBothTripSort());
map2.put(956l, new RouteTripSpec(956l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, THE_GRANGE) // THE_HAMPTONS //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5687", "8135", "8097", "8102" })) //
.compileBothTripSort());
map2.put(957l, new RouteTripSpec(957l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, OSCAR_ROMERO, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, THE_HAMPTONS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5980", "5695", "8583", "8033", "8670" })) //
.compileBothTripSort());
map2.put(959l, new RouteTripSpec(959l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, OSCAR_ROMERO) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5695", "5002" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(965l, new RouteTripSpec(965l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BRAEMAR, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2462", "1989" })) //
.compileBothTripSort());
map2.put(966l, new RouteTripSpec(966l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, OTTEWELL_AOB, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2487", //
"2064", //
"2915", //
"2360", //
"2426", //
"2434", //
"3355", "3157", "3217" })) //
.compileBothTripSort());
map2.put(967l, new RouteTripSpec(967l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WHITEMUD_DR_53_AVE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4021", "4353", "4809" })) //
.compileBothTripSort());
map2.put(968l, new RouteTripSpec(968l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ST_ROSE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5611", "4579", "4806" })) //
.compileBothTripSort());
map2.put(969l, new RouteTripSpec(969l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WHITEMUD_DR_53_AVE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4021", "4129", "4804" })) //
.compileBothTripSort());
map2.put(970l, new RouteTripSpec(970l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WHITEMUD_DR_53_AVE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JOSEPH_MC_NEIL) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"4950", "4636", "4811", "4597", //
"4158", //
"4153" //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] {/* no stops */})) //
.compileBothTripSort());
map2.put(971l, new RouteTripSpec(971l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, _84_ST_105_AVE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, _34_ST_35A_AVE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2676", "3355", "3708", "3185" })) //
.compileBothTripSort());
map2.put(972l, new RouteTripSpec(972l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MAC_EWAN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RUTHERFORD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "9251", "9848", "9685" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4213", "9666", "9242", "9251" })) //
.compileBothTripSort());
map2.put(973l, new RouteTripSpec(973l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_PHERSON, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BURNEWOOD) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3603", "3267" })) //
.compileBothTripSort());
map2.put(974l, new RouteTripSpec(974l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_PHERSON, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BURNEWOOD) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3603", "3420" })) //
.compileBothTripSort());
map2.put(975l, new RouteTripSpec(975l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2106", "3355", "3748", "3185", "3206" })) //
.compileBothTripSort());
map2.put(976l, new RouteTripSpec(976l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RIVERBEND, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4021", "4803", "4202" })) //
.compileBothTripSort());
map2.put(977l, new RouteTripSpec(977l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MEADOWS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "3217", "3470", "3703" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
ALL_ROUTE_TRIPS2 = map2;
}
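// Routes listed in ALL_ROUTE_TRIPS2 use the manual trip/stop ordering defined above;
// all other routes fall back to the default splitting logic of the parent class.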
@Override
public Pair<Long[], Integer[]> splitTripStop(MRoute mRoute, GTrip gTrip, GTripStop gTripStop, ArrayList<MTrip> splitTrips, GSpec routeGTFS) {
if (ALL_ROUTE_TRIPS2.containsKey(mRoute.getId())) {
return SplitUtils.splitTripStop(mRoute, gTrip, gTripStop, routeGTFS, ALL_ROUTE_TRIPS2.get(mRoute.getId()));
}
return super.splitTripStop(mRoute, gTrip, gTripStop, splitTrips, routeGTFS);
}
private static final Pattern N_A_I_T = Pattern.compile("((^|\\W){1}(n a i t)(\\W|$){1})", Pattern.CASE_INSENSITIVE);
private static final String N_A_I_T_REPLACEMENT = "$2" + NAIT + "$4";
private static final Pattern SUPER_EXPRESS = Pattern.compile("((^|\\W){1}(super express)(\\W|$){1})", Pattern.CASE_INSENSITIVE);
private static final Pattern STARTS_WITH_RSN = Pattern.compile("(^[\\d]+\\s)", Pattern.CASE_INSENSITIVE);
private static final String VIA = " via ";
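// Trip head sign cleaning, illustrated on a hypothetical head sign (exact output depends on CleanUtils):
// "9 Southgate Transit Centre via 109 Street" -> keep from " via ": " via 109 Street"
// -> strip leading route number / "Transit Centre" / "Super Express", abbreviate "Edmonton" -> "Edm" and "n a i t" -> "NAIT",
// -> CleanUtils street-type / number / point / label cleanup, giving roughly "Via 109 St".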
@Override
public String cleanTripHeadsign(String tripHeadsign) {
int indexOfVIA = tripHeadsign.toLowerCase(Locale.ENGLISH).indexOf(VIA);
if (indexOfVIA >= 0) {
tripHeadsign = tripHeadsign.substring(indexOfVIA); // keep only the "via ..." portion (this method is also used to clean stop head signs)
}
tripHeadsign = STARTS_WITH_RSN.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
tripHeadsign = TRANSIT_CENTER.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
tripHeadsign = TOWN_CENTER.matcher(tripHeadsign).replaceAll(TOWN_CENTER_REPLACEMENT);
tripHeadsign = SUPER_EXPRESS.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
tripHeadsign = EDMONTON.matcher(tripHeadsign).replaceAll(EDMONTON_REPLACEMENT);
tripHeadsign = N_A_I_T.matcher(tripHeadsign).replaceAll(N_A_I_T_REPLACEMENT);
tripHeadsign = CleanUtils.cleanStreetTypes(tripHeadsign);
tripHeadsign = CleanUtils.cleanNumbers(tripHeadsign);
tripHeadsign = CleanUtils.removePoints(tripHeadsign);
return CleanUtils.cleanLabel(tripHeadsign);
}
private static final Pattern TRANSIT_CENTER = Pattern.compile("((^|\\W){1}(transit center|transit centre)(\\W|$){1})", Pattern.CASE_INSENSITIVE);
private static final String TRANSIT_CENTER_REPLACEMENT = "$2" + TRANSIT_CENTER_SHORT + "$4";
private static final Pattern TOWN_CENTER = Pattern.compile("((^|\\W){1}(town center|town centre)(\\W|$){1})", Pattern.CASE_INSENSITIVE);
private static final String TOWN_CENTER_REPLACEMENT = "$2TC$4";
private static final Pattern INTERNATIONAL = Pattern.compile("((^|\\W){1}(international)(\\W|$){1})", Pattern.CASE_INSENSITIVE);
private static final String INTERNATIONAL_REPLACEMENT = "$2Int$4";
private static final Pattern EDMONTON = Pattern.compile("((^|\\W){1}(edmonton)(\\W|$){1})", Pattern.CASE_INSENSITIVE);
private static final String EDMONTON_REPLACEMENT = "$2" + EDM + "$4";
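// Stop name cleaning, illustrated on a hypothetical stop name (exact output depends on CleanUtils):
// "Edmonton International Airport Transit Centre" -> "Edm Int Airport TC"
// ("Transit Centre" -> "TC", "International" -> "Int", "Edmonton" -> "Edm", then street-type / number / label cleanup).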
@Override
public String cleanStopName(String gStopName) {
gStopName = TRANSIT_CENTER.matcher(gStopName).replaceAll(TRANSIT_CENTER_REPLACEMENT);
gStopName = TOWN_CENTER.matcher(gStopName).replaceAll(TOWN_CENTER_REPLACEMENT);
gStopName = INTERNATIONAL.matcher(gStopName).replaceAll(INTERNATIONAL_REPLACEMENT);
gStopName = EDMONTON.matcher(gStopName).replaceAll(EDMONTON_REPLACEMENT);
gStopName = CleanUtils.cleanStreetTypes(gStopName);
gStopName = CleanUtils.cleanNumbers(gStopName);
return CleanUtils.cleanLabel(gStopName);
}
@Override
public int getStopId(GStop gStop) {
return Math.abs(super.getStopId(gStop)); // remove negative stop IDs
}
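// Stop codes are kept as-is for the real-time API, except that a leading dash is dropped,
// e.g. (hypothetical code) REMOVE_STARTING_DASH turns "-1234" into "1234".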
private static final Pattern REMOVE_STARTING_DASH = Pattern.compile("(^\\-)", Pattern.CASE_INSENSITIVE);
@Override
public String getStopCode(GStop gStop) {
String stopCode = super.getStopCode(gStop); // do not change, used by real-time API
stopCode = REMOVE_STARTING_DASH.matcher(stopCode).replaceAll(StringUtils.EMPTY);
return stopCode; // do not change, used by real-time API
}
}
| src/org/mtransit/parser/ca_edmonton_ets_bus/EdmontonETSBusAgencyTools.java | package org.mtransit.parser.ca_edmonton_ets_bus;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
import org.mtransit.parser.CleanUtils;
import org.mtransit.parser.DefaultAgencyTools;
import org.mtransit.parser.Pair;
import org.mtransit.parser.SplitUtils;
import org.mtransit.parser.SplitUtils.RouteTripSpec;
import org.mtransit.parser.Utils;
import org.mtransit.parser.gtfs.data.GCalendar;
import org.mtransit.parser.gtfs.data.GCalendarDate;
import org.mtransit.parser.gtfs.data.GRoute;
import org.mtransit.parser.gtfs.data.GSpec;
import org.mtransit.parser.gtfs.data.GStop;
import org.mtransit.parser.gtfs.data.GTrip;
import org.mtransit.parser.gtfs.data.GTripStop;
import org.mtransit.parser.mt.data.MAgency;
import org.mtransit.parser.mt.data.MDirectionType;
import org.mtransit.parser.mt.data.MRoute;
import org.mtransit.parser.mt.data.MTrip;
import org.mtransit.parser.mt.data.MTripStop;
// https://data.edmonton.ca/
// https://data.edmonton.ca/Transit/ETS-Bus-Schedule-GTFS-Data-Feed-zipped-files/gzhc-5ss6
// https://data.edmonton.ca/download/gzhc-5ss6/application/zip
// http://www.edmonton.ca/ets/ets-data-for-developers.aspx
public class EdmontonETSBusAgencyTools extends DefaultAgencyTools {
public static void main(String[] args) {
if (args == null || args.length == 0) {
args = new String[3];
args[0] = "input/gtfs.zip";
args[1] = "../../mtransitapps/ca-edmonton-ets-bus-android/res/raw/";
args[2] = ""; // files-prefix
}
new EdmontonETSBusAgencyTools().start(args);
}
private HashSet<String> serviceIds;
@Override
public void start(String[] args) {
System.out.printf("\nGenerating ETS bus data...");
long start = System.currentTimeMillis();
this.serviceIds = extractUsefulServiceIds(args, this, true);
super.start(args);
System.out.printf("\nGenerating ETS bus data... DONE in %s.\n", Utils.getPrettyDuration(System.currentTimeMillis() - start));
}
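// Calendars, calendar dates and trips whose service ID is not in the extracted set are filtered out below.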
@Override
public boolean excludeCalendar(GCalendar gCalendar) {
if (this.serviceIds != null) {
return excludeUselessCalendar(gCalendar, this.serviceIds);
}
return super.excludeCalendar(gCalendar);
}
@Override
public boolean excludeCalendarDate(GCalendarDate gCalendarDates) {
if (this.serviceIds != null) {
return excludeUselessCalendarDate(gCalendarDates, this.serviceIds);
}
return super.excludeCalendarDate(gCalendarDates);
}
@Override
public boolean excludeTrip(GTrip gTrip) {
if (this.serviceIds != null) {
return excludeUselessTrip(gTrip, this.serviceIds);
}
return super.excludeTrip(gTrip);
}
@Override
public Integer getAgencyRouteType() {
return MAgency.ROUTE_TYPE_BUS;
}
@Override
public long getRouteId(GRoute gRoute) {
return Long.parseLong(gRoute.getRouteId()); // GTFS route ID is numeric (same value as the route short name)
}
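// String fragments and abbreviated destination / landmark labels used to build trip head signs for the route trip specs below.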
private static final String DASH = " - ";
private static final String SLASH = " / ";
private static final String FORT = "Fort";
private static final String _AVE = " Ave";
private static final String _ST = " St";
private static final String TRANSIT_CENTER_SHORT = "TC";
private static final String EDM = "Edm";
private static final String EDM_GARRISON = EDM + " Garrison";
private static final String WEST_EDM_MALL = "WEM"; // "West " + EDM + " Mall";
private static final String WEST_EDM_MALL_TC = "WEM"; // "WEM TC"
private static final String LEWIS_FARMS = "Lewis Farms";
private static final String LEWIS_FARMS_TC = "Lewis Farms"; // "Lewis Farms TC"
private static final String CAPILANO = "Capilano"; //
private static final String CAPILANO_TC = "Capilano"; // "Capilano TC"
private static final String CLAREVIEW = "Clareview";
private static final String CLAREVIEW_EAST_TC = "Clareview"; // "East Clareview TC"
private static final String CLAREVIEW_WEST_TC = "Clareview"; // "West Clareview TC"
private static final String CROMDALE = "Cromdale";
private static final String JASPER_PLACE = "Jasper Pl";
private static final String CONCORDIA = "Concordia";
private static final String COLISEUM = "Coliseum";
private static final String COLISEUM_TC = COLISEUM; // "Coliseum TC";
private static final String WESTMOUNT = "Westmount";
private static final String WESTMOUNT_TC = WESTMOUNT; // "Westmount TC"
private static final String UNIVERSITY = "University";
private static final String UNIVERSITY_TC = UNIVERSITY; // "University TC";
private static final String MILL_WOODS = "Mill Woods";
private static final String MILL_WOODS_ = "Mill Woods TC";
private static final String MILL_WOODS_TC = "Mill Woods TC";
private static final String DAN_KNOTT = "Dan Knott";
private static final String NAIT = "NAIT";
private static final String SOUTHGATE = "Southgate";
private static final String SOUTHGATE_TC = "Southgate"; // "Southgate TC"
private static final String NORTHGATE = "Northgate";
private static final String NORTHGATE_TC = "Northgate"; // "Northgate TC"
private static final String ABBOTTSFIELD = "Abbottsfield";
private static final String AMISKWACIY = "amiskwaciy";
private static final String EAUX_CLAIRES = "Eaux Claires";
private static final String DOWNTOWN = "Downtown";
private static final String MILLGATE = "Millgate";
private static final String MILLGATE_TC = "Millgate"; // "Millgate TC"
private static final String GOV_CTR = "Gov Ctr";
private static final String MAC_EWAN = "MacEwan";
private static final String MAC_EWAN_GOV_CTR = MAC_EWAN + SLASH + GOV_CTR;
private static final String CASTLE_DOWNS = "Castle Downs";
private static final String CASTLE_DOWNS_TC = "Castle Downs"; // "Castle Downs TC"
private static final String CENTURY_PK = "Century Pk";
private static final String CENTURY_PK_TC = CENTURY_PK; // "Century Pk TC";
private static final String YELLOWBIRD = "Yellowbird";
private static final String SOUTH_CAMPUS = "South Campus";
private static final String SOUTH_CAMPUS_TC = SOUTH_CAMPUS; // "South Campus TC";
private static final String FT_EDM = FORT + " " + EDM;
private static final String LEGER = "Leger";
private static final String LEGER_TC = LEGER; // "Leger TC"
private static final String BRANDER_GDNS = "Brander Gdns";
private static final String MEADOWS = "Mdws"; // "Meadows";
private static final String BLACKMUD_CRK = "Blackmud Crk";
private static final String BLACKBURNE = "Blackburne";
private static final String ALLARD = "Allard";
private static final String HARRY_AINLAY = "Harry Ainlay";
private static final String TWIN_BROOKS = "Twin Brooks";
private static final String RUTHERFORD = "Rutherford";
private static final String SOUTHWOOD = "Southwood";
private static final String SOUTH_EDM_COMMON = "South " + EDM + " Common";
private static final String PARKALLEN = "Parkallen";
private static final String KNOTTWOOD = "Knottwood";
private static final String BELVEDERE = "Belvedere";
private static final String BELVEDERE_TC = "Belvedere"; // "Belvedere TC"
private static final String BONNIE_DOON = "Bonnie Doon";
private static final String LAUREL = "Laurel";
private static final String PLYPOW = "Plypow";
private static final String TAMARACK = "Tamarack";
private static final String BRECKENRIDGE_GRNS = "Breckenridge Grns";
private static final String WESTRIDGE = "Westridge";
private static final String LESSARD = "Lessard";
private static final String CAMERON_HTS = "Cameron Hts";
private static final String LYMBURN = "Lymburn";
private static final String ARCH_MAC = "Arch Mac"; // Donald
private static final String ROSS_SHEPPARD = "Ross Shep"; // "Ross Sheppard";
private static final String ORMSBY_PL = "Ormsby Pl";
private static final String LYMBURN_ORMSBY_PL = LYMBURN + SLASH + ORMSBY_PL;
private static final String BERIAULT = "Beriault";
private static final String CRESTWOOD = "Crestwood";
private static final String ST_FRANCIS_XAVIER = "St Francis Xavier";
private static final String LA_PERLE = "LaPerle";
private static final String LA_ZERTE = "LaZerte";
private static final String MARY_BUTTERWORTH = "Mary Butterworth";
private static final String HILLCREST = "Hillcrest";
private static final String CARLTON = "Carlton";
private static final String WEDGEWOOD = "Wedgewood";
private static final String THE_GRANGE = "The Grange";
private static final String RIO_TERRACE = "Rio Ter";
private static final String THE_HAMPTONS = "The Hamptons";
private static final String WESTVIEW_VLG = "Westview Vlg";
private static final String MISTATIM_IND = "Mistatim Ind";
private static final String STADIUM = "Stadium";
private static final String STADIUM_TC = "Stadium"; // "Stadium TC"
private static final String LAGO_LINDO = "Lago Lindo";
private static final String MONTROSE = "Montrose";
private static final String KINGSWAY = "Kingsway";
private static final String KING_EDWARD_PK = "King Edward Pk";
private static final String RAPPERSWILL = "Rapperswill";
private static final String OXFORD = "Oxford";
private static final String _34_ST_35A_AVE = "34" + _ST + SLASH + "35A" + _AVE;
private static final String _82_ST = "82" + _ST;
private static final String _82_ST_132_AVE = "82" + _ST + SLASH + "132" + _AVE;
private static final String _84_ST_105_AVE = "84" + _ST + SLASH + "105" + _AVE;
private static final String _84_ST_111_AVE = "84" + _ST + SLASH + "111" + _AVE;
private static final String _85_ST_132_AVE = "85" + _ST + DASH + "132" + _AVE;
private static final String _88_ST_132_AVE = "88" + _ST + SLASH + "132" + _AVE;
private static final String _95_ST_132_AVE = "95" + _ST + SLASH + "132" + _AVE;
private static final String _127_ST_129_AVE = "127" + _ST + SLASH + "129" + _AVE;
private static final String _142_ST_109_AVE = "142" + _ST + SLASH + "109" + _AVE;
private static final String WHITEMUD_DR_53_AVE = "Whitemud Dr" + SLASH + "53" + _AVE;
private static final String JOSEPH_MC_NEIL = "Joseph McNeil";
private static final String CANOSSA = "Canossa";
private static final String CHAMBERY = "Chambery";
private static final String KERNOHAN = "Kernohan";
private static final String LONDONDERRY = "Londonderry";
private static final String EVERGREEN = "Evergreen";
private static final String FRASER = "Fraser";
private static final String FT_SASKATCHEWAN = FORT + " Saskatchewan";
private static final String SPRUCE_GRV = "Spruce Grv";
private static final String MC_CONACHIE = "McConachie";
private static final String SCHONSEE = "Schonsee";
private static final String BRINTNELL = "Brintnell";
private static final String KLARVATTEN = "Klarvatten";
private static final String RIVERDALE = "Riverdale";
private static final String GOLD_BAR = "Gold Bar";
private static final String JASPER_GATES = "Jasper Gts";
private static final String SOUTHPARK = "Southpark";
private static final String NORTHLANDS = "Northlands";
private static final String HAWKS_RDG = "Hawks Rdg";
private static final String WINTERBURN = "Winterburn";
private static final String WINTERBURN_IND = WINTERBURN + " Ind";
private static final String HOLYROOD = "Holyrood";
private static final String STRATHCONA = "Strathcona";
private static final String STRATHCONA_IND = STRATHCONA + " Ind";
private static final String WINDSOR_PARK = "Windsor Pk";
private static final String RITCHIE = "Ritchie";
private static final String AMBLESIDE = "Ambleside";
private static final String WINDERMERE = "Windermere";
private static final String BELGRAVIA = "Belgravia";
private static final String ROSENTHAL = "Rosenthal";
private static final String CHAPPELLE = "Chappelle";
private static final String ORCHARDS = "Orchards";
private static final String QUARRY_RDG = "Quarry Rdg";
private static final String HOLLICK_KENYON = "Hollick Kenyon";
private static final String MC_LEOD = "McLeod";
private static final String EDM_WASTE_MGT_CTR = EDM + " Waste Mgt Ctr";
private static final String VLY_ZOO = "Vly Zoo";
private static final String VLY_ZOO_FT_EDM = VLY_ZOO + SLASH + FT_EDM;
private static final String EDM_INT_AIRPORT = "Edm Int Airport";
private static final String GRIESBACH = "Griesbach";
private static final String REMAND_CTR = "Remand Ctr";
private static final String ARCH_O_LEARY = "Arch O'Leary";
private static final String OTTEWELL = "Ottewell";
private static final String AOB = "AOB";
private static final String OTTEWELL_AOB = OTTEWELL + SLASH + AOB;
private static final String BURNEWOOD = "Burnewood";
private static final String MC_PHERSON = "McPherson";
private static final String ST_ROSE = "St Rose";
private static final String OSCAR_ROMERO = "Oscar Romero";
private static final String BRAEMAR = "Braemar";
private static final String PARKVIEW = "Parkview";
private static final String QUEEN_ELIZABETH = "Queen Elizabeth";
private static final String HADDOW = "Haddow";
private static final String FR_TROY = "Fr Troy";
private static final String JACKSON_HTS = "Jackson Hts";
private static final String BATURYN = "Baturyn";
private static final String EASTGLEN = "Eastglen";
private static final String MINCHAU = "Minchau";
private static final String HOLY_FAMILY = "Holy Family";
private static final String MC_NALLY = "McNally";
private static final String SILVERBERRY = "SilverBerry";
private static final String VICTORIA = "Victoria";
private static final String MEADOWLARK = "Meadowlark";
private static final String WESTLAWN = "Westlawn";
private static final String BELMEAD = "Belmead";
private static final String MATT_BERRY = "Matt Berry";
private static final String JJ_BOWLEN = "JJ Bowlen";
private static final String CARDINAL_LEGER = "Cardinal Leger";
private static final String DUNLUCE = "Dunluce";
private static final String BEAUMARIS = "Beaumaris";
private static final String ELSINORE = "Elsinore";
private static final String RIVERBEND = "Riverbend";
private static final String BEARSPAW = "Bearspaw";
private static final String AVALON = "Avalon";
private static final String WILDROSE = "Wildrose";
private static final String GREENVIEW = "Greenview";
private static final String KENILWORTH = "Kenilworth";
private static final String HARDISTY = "Hardisty";
private static final String CRAWFORD_PLAINS = "Crawford Plains";
private static final String RHATIGAN_RIDGE = "Rhatigan Rdg";
private static final String AVONMORE = "Avonmore";
private static final String LARKSPUR = "Larkspur";
private static final String MAYLIEWAN = "Mayliewan";
private static final String WP_WAGNER = "WP Wagner";
private static final String BROOKSIDE = "Brookside";
private static final String MAGRATH = "Magrath";
private static final String LY_CAIRNS = "LY Cairns";
private static final String BRUCE_SMITH = "Bruce Smith";
private static final String JH_PICARD = "JH Picard";
private static final String TD_BAKER = "TD Baker";
private static final String ST_KEVIN = "St Kevin";
private static final String LAKEWOOD = "Lakewood";
private static final String WOODVALE = "Woodvale";
private static final String VERNON_BARFORD = "Vernon Barford";
private static final String BELLE_RIVE = "Belle Rive";
private static final String LENDRUM = "Lendrum";
@Override
public String getRouteLongName(GRoute gRoute) {
String gRouteLongName = gRoute.getRouteLongName();
gRouteLongName = CleanUtils.cleanStreetTypes(gRouteLongName);
return CleanUtils.cleanLabel(gRouteLongName);
}
@Override
public String getRouteShortName(GRoute gRoute) {
return super.getRouteShortName(gRoute); // do not change, used by real-time API
}
private static final String AGENCY_COLOR_BLUE = "2D3092"; // BLUE (from Wikipedia SVG)
private static final String AGENCY_COLOR = AGENCY_COLOR_BLUE;
@Override
public String getAgencyColor() {
return AGENCY_COLOR;
}
@Override
public int compareEarly(long routeId, List<MTripStop> list1, List<MTripStop> list2, MTripStop ts1, MTripStop ts2, GStop ts1GStop, GStop ts2GStop) {
if (ALL_ROUTE_TRIPS2.containsKey(routeId)) {
return ALL_ROUTE_TRIPS2.get(routeId).compare(routeId, list1, list2, ts1, ts2, ts1GStop, ts2GStop);
}
return super.compareEarly(routeId, list1, list2, ts1, ts2, ts1GStop, ts2GStop);
}
@Override
public int compare(long routeId, List<MTripStop> list1, List<MTripStop> list2, MTripStop ts1, MTripStop ts2, GStop ts1GStop, GStop ts2GStop) {
return super.compare(routeId, list1, list2, ts1, ts2, ts1GStop, ts2GStop);
}
@Override
public void setTripHeadsign(MRoute mRoute, MTrip mTrip, GTrip gTrip, GSpec gtfs) {
if (ALL_ROUTE_TRIPS2.containsKey(mRoute.getId())) {
return; // split
}
String tripHeadsign = gTrip.getTripHeadsign();
if ("1".equals(tripHeadsign)) {
tripHeadsign = null;
}
if (StringUtils.isEmpty(tripHeadsign)) {
System.out.printf("\nUnexpected trip to split %s\n", gTrip);
System.exit(-1);
}
mTrip.setHeadsignString(tripHeadsign, gTrip.getDirectionId()); // cleanTripHeadsign() is not applied here; it is currently used for stop head signs
}
@Override
public boolean mergeHeadsign(MTrip mTrip, MTrip mTripToMerge) {
System.out.printf("\nUnexpected trips to merge: %s & %s!\n", mTrip, mTripToMerge);
System.exit(-1);
return false;
}
@Override
public ArrayList<MTrip> splitTrip(MRoute mRoute, GTrip gTrip, GSpec gtfs) {
if (ALL_ROUTE_TRIPS2.containsKey(mRoute.getId())) {
return ALL_ROUTE_TRIPS2.get(mRoute.getId()).getAllTrips();
}
return super.splitTrip(mRoute, gTrip, gtfs);
}
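// Manually-ordered trip/stop specifications, keyed by route ID, for routes whose GTFS trips need custom direction splitting.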
private static HashMap<Long, RouteTripSpec> ALL_ROUTE_TRIPS2;
static {
HashMap<Long, RouteTripSpec> map2 = new HashMap<Long, RouteTripSpec>();
map2.put(1l, new RouteTripSpec(1l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"5009", // West Edmonton Mall Transit Centre
"5302", // Meadowlark Transit Centre
"5110", // Jasper Place Transit Centre
"5169", // == 142 Street & Stony Plain Road
"5432", "1047", // !=
"5440", "1917", // !=
"1242", // == 124 Street & 102 Avenue
"1322", // == 103 Street & Jasper Avenue
"1336", // != 101 Street & Jasper Avenue
"1346", // != 101 Street & 101A Avenue
"1346", // 101 Street & 101A Avenue
"2591", // 79 Street & 106 Avenue
"2301" // Capilano Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"2301", // Capilano Transit Centre
"2267", // 79 Street & 106 Avenue
"1620", // 101 Street & Jasper Avenue
"1746", // == 122 Street & 102 Avenue
"1971", "5087", // !=
"1828", "5564", // !=
"5157", // == 140 Street & Stony Plain Road
"5101", // Jasper Place Transit Centre
"5301", // Meadowlark Transit Centre
"5009" // West Edmonton Mall Transit Centre
})) //
.compileBothTripSort());
map2.put(2l, new RouteTripSpec(2l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LESSARD) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5723", "5008", "5437", "1336",
/* + */"1256"/* + */, //
"1408", "1561", "1454", "7902" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7902", "1561", "1407",
/* + */"1266"/* + */, //
"1620", "5185", "5003", "5723" })) //
.compileBothTripSort());
map2.put(3l, new RouteTripSpec(3l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CROMDALE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5106", "5928", "1279", "1360", //
"1243", // ==
"1142", // !=
"1336", // !=
"1256", "1147" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1147", "1346", "1775", "1846", "1669", "5389", "5106" })) //
.compileBothTripSort());
map2.put(4l, new RouteTripSpec(4l, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"8601", // Lewis Farms Transit Centre
"5006", // West Edmonton Mall Transit Centre
"2702", // South Campus Transit Centre Fort Edmonton Park // LAST
"2714", // South Campus Transit Centre Fort Edmonton Park // CONTINUE
"2748", // ==
"22354", // != <>
"2982", // != <>
"2638", // == <>
"2625", // != <>
"2890", // == <> 114 Street & 89 Avenue
"2002", // University Transit Centre
"2065", // == 87 Street & 82 Avenue
"2593", // != 85 Street & 82 Avenue
"2196", // != 83 Street & 90 Avenue
"2952", // != 83 Street & 84 Avenue
"2159", // <> 83 Street & 82 Avenue // LAST
"2549", // 83 Street & 82 Avenue // LAST
"2447", // 83 Street & 82 Avenue // CONTINUE
"2222", // !=
"2372", // !=
"2306", // Capilano Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"2306", // Capilano Transit Centre
"2532", // !=
"2532", "2476", "2568", "2050", "2462", "2161", "2287", "2288", "2494", "2376", "2231", "2015", "2615", "2608", "2167", "2193", //
"2037", // !=
"2159", // <> 83 Street & 82 Avenue // CONTINUE
"2590", // !=
"2340", "2087", "2131", "2294", "2236", "2033", "2659", "2853", "2723", "2891", "2845", "2683", "2893", "2788", "2689", //
"2733", // !=
"2752", // ==
"22354", // != <>
"2982", // != <>
"2638", // == <>
"2625", // != <>
"2890", // == <> 114 Street & 89 Avenue
"2001", // != University Transit Centre
"2702", // South Campus Transit Centre Fort Edmonton Park
"5006", // West Edmonton Mall Transit Centre // LAST
"5003", // West Edmonton Mall Transit Centre // CONTINUE
"8601" // Lewis Farms Transit Centre
})) //
.compileBothTripSort());
map2.put(5l, new RouteTripSpec(5l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, COLISEUM, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5210", "1083", "1336", "1188",
/* + */"1051"/* + */, //
"1268", "1202" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1202", "1328", "1620", "5210" })) //
.compileBothTripSort());
map2.put(6l, new RouteTripSpec(6l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"2203", // Southgate Transit Centre
"2211", // Southgate Transit Centre
"2085", // ++
"2024", // ++
"2109", // Millgate Transit Centre
"2102", // Millgate Transit Centre
"3281", // ++
"3121", // ++
"3215", // Mill Woods Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"3215", // Mill Woods Transit Centre
"3127", // ++
"3347", // ++
"2109", // Millgate Transit Centre
"2273", // ++
"2179", // ++
"2211", // Southgate Transit Centre
"2203", // Southgate Transit Centre
})) //
.compileBothTripSort());
map2.put(7l, new RouteTripSpec(7l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"5108", // Jasper Place Transit Centre
"1881", // 124 Street & 107 Avenue
"1829", // 105 Street & 105 Avenue
"1542", // ++
"2659", // ++
"2891", // ++
"2890", // 114 Street & 89 Avenue
"2002", // University Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"2002", // University Transit Centre
"2860", // ++
"2824", // ++
"1457", // ++
"1989", // 108 Street & 104 Avenue
"1808", // ++
"5108", // Jasper Place Transit Centre
})) //
.compileBothTripSort());
map2.put(8l, new RouteTripSpec(8l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"3207", // Mill Woods Transit Centre
"3122", // ==
"3244", // !=
"3338", // !=
"3462", // !=
"3498", // !=
"3264", // ==
"2108", // Millgate Transit Centre
"1989", // 108 Street & 104 Avenue
"1106", // Kingsway RAH Transit
"1476", // 106 Street & 118 Avenue
"1201", // Coliseum Transit Centre
"1001", // Abbottsfield Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"1001", // Abbottsfield Transit Centre
"1208", // Coliseum Transit Centre
"1112", // Kingsway RAH Transit Centre
"1557", // 109 Street & 105 Avenue
"2103", // Millgate Transit Centre
"3599", // ==
"3676", // !=
"3360", // !=
"3394", // !=
"3121", // ==
"3207", // Mill Woods Transit Centre
})) //
.compileBothTripSort());
map2.put(9l, new RouteTripSpec(9l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) // SOUTHGATE
.addTripSort(MDirectionType.NORTH.intValue(), // CENTURY_PK / SOUTHGATE => EAUX_CLAIRES
Arrays.asList(new String[] { //
"4216", // Century Park Transit Centre
"2218", // == Southgate Transit Centre
"2623", // ==
"2658", // !=
"2830", "2657", // !=
"2852", // ==
"1591", // 101 Street & MacDonald Drive
"1108", // 101 Street & MacDonald Drive
"1476", // 106 Street & 118 Avenue
"7016", // Northgate Transit Centre
"6317" // Eaux Claires Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), // EAUX_CLAIRES => CENTURY_PK / SOUTHGATE
Arrays.asList(new String[] { //
"6317", // Eaux Claires Transit Centre
"7001", // Northgate Transit Centre
"1532", // 106 Street & 118 Avenue Loop
"1142", // 101 Street & MacDonald Drive nearside
"2631",// ==
"2895", "2833", // !=
"-22352", // !=
"2773", // ==
"2639", // ==
"-22223", // !=
"2218", // == Southgate Transit Centre
"2206", // Southgate Transit Centre
"4216" // Century Park Transit Centre
})) //
.compileBothTripSort());
map2.put(10l, new RouteTripSpec(10l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW_EAST_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, COLISEUM_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"1203", // Coliseum Transit Centre
"7186", // 69 Street & 144 Avenue
"7209", // Belvedere Transit Centre
"7101", // East Clareview Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"7101", // East Clareview Transit Centre
"7884", // Victoria Trail & Hooke Road
"7201", // Belvedere Transit Centre
"7572", // 66 Street & 144 Avenue
"1203", // Coliseum Transit Centre
})) //
.compileBothTripSort());
map2.put(11l, new RouteTripSpec(11l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"7007", "7186", "7106" //
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"7106", "7572", "7008", "7496", "7007" //
})) //
.compileBothTripSort());
map2.put(12l, new RouteTripSpec(12l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"1251", "1529", //
"1476", "1434", "1435", //
"1553", // ==
"1032", // !=
"1109", // ==
"1886", // !=
"11307", //
"1821", //
"1669", //
"6122", //
"6328", "6252", "7003" //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"7003", "6551", //
"6369", //
"6289", //
"6372", //
"-11330", //
"1932", "1847", //
"1778", //
"1847", "1951", //
"1109", "1533", "1476", //
"11326", "1113", "1251" //
})) //
.compileBothTripSort());
map2.put(13l, new RouteTripSpec(13l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"6005", // Castle Downs Transit Centre
"7011", // Northgate Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"7011", // Northgate Transit Centre
"6005", // Castle Downs Transit Centre
})) //
.compileBothTripSort());
map2.put(14l, new RouteTripSpec(14l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"5011", // West Edmonton Mall Transit Centre
"5024", // 180 Street & 98 Avenue
"5153", // == 159 Street & Stony Plain Road
"5112", // != 157 Street & Stony Plain Road nearside
"5103",// != Jasper Place Transit Centre
"5293", // != 143 Street & Stony Plain Road
"1999" // != 100 Street & 103A Avenue nearside
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"1123", // 99 Street & 103A Avenue nearside
"1812", // == 111 Street & Jasper Avenue Nearside
"1828", // != 124 Street & 102 Avenue
"1971", // != 124 Street & 102 Avenue
"5185", // == 142 Street & Stony Plain Road
"5103", // Jasper Place Transit Centre
"5855", // 182 Street & 97A Avenue
"5011" // West Edmonton Mall Transit Centre
})) //
.compileBothTripSort());
map2.put(15l, new RouteTripSpec(15l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"3208", // Mill Woods Transit Centre
"2117", // Millgate Transit Centre
"1457", // 100 Street & Jasper Avenue
"1989", // 108 Street & 104 Avenue
"1227", // ++
"1532", // ++
"1476", // 106 Street & 118 Avenue
"6317", // Eaux Claires Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"6317", // Eaux Claires Transit Centre
"1532", // 106 Street & 118 Avenue Loop
"1557", // 109 Street & 105 Avenue
"1542", // 100 Street & Jasper Avenue
"2118", // Millgate Transit Centre
"3208", // Mill Woods Transit Centre
})) //
.compileBothTripSort());
map2.put(16l, new RouteTripSpec(16l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, GOV_CTR) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "1310", "7011", "6314", "6075", "6576", "6009" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "6009", "6124", "6317",
/* + */"7011"/* + */, //
"7003", "1310" })) //
.compileBothTripSort());
map2.put(17l, new RouteTripSpec(17l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"4203", // Century Park Transit Centre
"2206", // Southgate Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"2206", // Southgate Transit Centre
"4203", // Century Park Transit Centre
})) //
.compileBothTripSort());
map2.put(23l, new RouteTripSpec(23l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"5001", // West Edmonton Mall Transit Centre
"4202", // Century Park Transit Centre
"3217", // Mill Woods Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"3217", // Mill Woods Transit Centre
"4211", // Century Park Transit Centre
"5001", // West Edmonton Mall Transit Centre
})) //
.compileBothTripSort());
map2.put(24l, new RouteTripSpec(24l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"4806", // Leger Transit Centre
"9093", // Anderson Crescent W Ent & Anderson Way SW
"9095", // ++
"9096", // ++
"9097", // ++
"9098", // ++
"9241", // ++
"9244", // ++
"9245", // ++
"9246", // ++
"9673", // ++
"9405", // ++
"9633", // ++
"9815", // !=
"9057", // ==
"9630", // == Rabbit Hill Road & Ellerslie Road
"9071", // !=
"9072", // !=
"9631", // ==
"4106", // !=
"4864", // ++
"4548", // ++
"4201", // Century Park Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"4201", // Century Park Transit Centre
"4456", // ++
"4105", // ++
"4790", // !=
"9057", // ==
"9630", // == Rabbit Hill Road & Ellerslie Road
"9071", // !=
"9072", // !=
"9631", // ==
"9635", // !=
"9634", // ++
"9770", // ++
"9092", // 170 Street & Anderson Way SW
"4806", // Leger Transit Centre
})) //
.compileBothTripSort());
map2.put(25l, new RouteTripSpec(25l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "4801",
/* + */"4938"/* + */, //
/* + */"9415"/* + */, //
/* + */"9486"/* + */, //
/* + */"9557"/* + */, //
/* + */"9176"/* + */, //
/* + */"9632"/* + */, //
/* + */"9713"/* + */, //
/* + */"9094"/* + */, //
/* + */"9446"/* + */, //
/* + */"4106"/* + */, //
"4212" //
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "4212",
/* + */"9324"/* + */, //
"9632",
/* + */"9409"/* + */, //
/* + */"9553"/* + */, //
/* + */"9555"/* + */, //
/* + */"9412"/* + */, //
/* + */"9486"/* + */, //
/* + */"9415"/* + */, //
/* + */"9486"/* + */, //
"9526", "4801" //
})) //
.compileBothTripSort());
map2.put(26l, new RouteTripSpec(26l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WINDERMERE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "9460",
/* + */"9632"/* + */, //
"4808" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4808",
/* + */"9710"/* + */,//
"9460" })) //
.compileBothTripSort());
map2.put(30l, new RouteTripSpec(30l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3217", "4211", "4811", "4597", "4153", "2704" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2704", "4021", "4494", "4811", "4803", "4202", "3217" })) //
.compileBothTripSort());
map2.put(31l, new RouteTripSpec(31l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"4813", // Leger Transit Centre
"4308", // Hodgson Boulevard & Hilliard Green
"4329", // Carter Crest Road West & Rabbit Hill Road
"2208", // Southgate Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"2208", // Southgate Transit Centre
"4439", // Terwillegar Drive & 40 Avenue
"4834", // Hodgson Boulevard & Hilliard Green
"4813", // Leger Transit Centre
})) //
.compileBothTripSort());
map2.put(32l, new RouteTripSpec(32l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BRANDER_GDNS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"4025", // 148 Street & Riverbend Road nearside
"4153", // Whitemud Drive NB & 53 Avenue
"2705", // South Campus Transit Centre Fort Edmonton Park
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"2705", // South Campus Transit Centre Fort Edmonton Park
"4021", // Whitemud Drive SB & 53 Avenue
"4025", // 148 Street & Riverbend Road nearside
})) //
.compileBothTripSort());
map2.put(33l, new RouteTripSpec(33l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MEADOWS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"5001", "4021", "4040", "2973", "2205", "2215", "2118", "3713" //
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"3713", "2117", "2205", "2984", "4021", "4153", "5001" //
})) //
.compileBothTripSort());
map2.put(34l, new RouteTripSpec(34l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"4809", // Leger Transit Centre
"4069", // Bulyea Road & Burton Road S
"2209", // Southgate Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"2209", // Southgate Transit Centre
"4167", // Bulyea Road & Terwillegar Drive
"4809", // Leger Transit Centre
})) //
.compileBothTripSort());
map2.put(35l, new RouteTripSpec(35l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"4812", // Leger Transit Centre
"4935", // 156 Street & South Terwillegar Boulevard
"4367", // Rabbit Hill Road & 23 Avenue
"4215", // Century Park Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"4215", // Century Park Transit Centre
"4114", // Rabbit Hill Road & 23 Avenue
"4936", // 156 Street & 9 Avenue
"4812", // Leger Transit Centre
})) //
.compileBothTripSort());
map2.put(36l, new RouteTripSpec(36l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"4211", // Century Park Transit Centre
"4749", // !=
"4810", // <> Leger Transit Centre
"4530", // !=
"4455", // Falconer Road & Riverbend Square
"4158", // Whitemud Drive SB & 53 Avenue
"2703", // South Campus Transit Centre Fort Edmonton Park
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"2703", // South Campus Transit Centre Fort Edmonton Park
"4021", // Whitemud Drive SB & 53 Avenue
"4129", // Falconer Road & Riverbend Square
"4483", // !=
"4810", // <> Leger Transit Centre => SOUTH_CAMPUS_TC
"4804", // Leger Transit Centre
"4211", // Century Park Transit Centre
})) //
.compileBothTripSort());
map2.put(37l, new RouteTripSpec(37l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"4802", // Leger Transit Centre
"4117", // Towne Centre Boulevard & Terwillegar Boulevard
"4754", // McLay Crescent W & MacTaggart Drive
"4215", // Century Park Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"4215", // Century Park Transit Centre
"4643", // Rabbit Hill Road & Terwillegar Boulevard
"4856", // Towne Centre Boulevard & Terwillegar Boulevard
"4802", // Leger Transit Centre
})) //
.compileBothTripSort());
map2.put(38l, new RouteTripSpec(38l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"4805", // Leger Transit Centre
"4519", // !=
"4122", // !=
"4938", // !=
"4455", // !=
"4427", // ==
"4288", // ++
"4469", // ==
"4597", // != Riverbend Road & Rabbit Hill Road
"4191", // !=
"4041", // ==
"4037", // 143 Street & 53 Avenue
"4038", // ++
"4031", // ++
"4034", // 144 Street & 60 Avenue
"4279", // ==
"4040", // != Whitemud Drive SB & 53 Avenue
"2207", // Southgate Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"2207", // Southgate Transit Centre
"4020", // !=
"4041", // ==
"4037", // 143 Street & 53 Avenue
"4038", // ++
"4031", // ++
"4034", // 144 Street & 60 Avenue
"4279", // ==
"4021", // !=
"4126", // !=
"4427", // ==
"4288", // ++
"4469", // ==
"4042", // == Riverbend Road & Rabbit Hill Road
"4373", // !=
"4262", // !=
"4320", // !=
"4749", // !=
"4805", // Leger Transit Centre
})) //
.compileBothTripSort());
map2.put(39l, new RouteTripSpec(39l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RUTHERFORD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"9242", // 117 Street & Rutherford Road SW
"9685", // McMullen Green & MacEwan Road SW
"4213", // Century Park Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"4213", // Century Park Transit Centre
"9666", // 111 Street & MacEwan Road SW
"9242", // 117 Street & Rutherford Road SW
})) //
.compileBothTripSort());
map2.put(40l, new RouteTripSpec(40l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, YELLOWBIRD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"4290", // 105 Street & 21 Avenue #Yellowbird
"4118", // ++
"4206", // Century Park Transit Centre
"4224", // ++
"4054", // ++
"2203", // Southgate Transit Centre
"2211" // Southgate Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"2203", // Southgate Transit Centre
"2211", // Southgate Transit Centre
"4490", // ++
"4164", // ++
"4205", // Century Park Transit Centre
"4467", // ++
"4290" // 105 Street & 21 Avenue #Yellowbird
})) //
.compileBothTripSort());
map2.put(41l, new RouteTripSpec(41l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4208", "4168", "2213" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2213", "4486", "4208" })) //
.compileBothTripSort());
map2.put(42l, new RouteTripSpec(42l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4209", "4070", "2217" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2217", "4342", "4209" })) //
.compileBothTripSort());
map2.put(43l, new RouteTripSpec(43l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"4214", // Century Park Transit Centre
"4151", // == 111 Street & Saddleback Road N Ent
"4543", // != 112 Street & Saddleback Road North Ent
"4156", // != Saddleback Road & 27 Avenue
"4547", // != 112 Street & Saddleback Road North Ent
"4493", // != 116 Street & 30 Avenue
"4154", // == 117 Street & 28 Avenue
"2711", // South Campus Transit Centre Fort Edmonton Park
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"2711", // South Campus Transit Centre Fort Edmonton Park
"4337", // == 117 Street & 28 Avenue
"4096", // != Saddleback Road & 27 Avenue
"4166", // != 113 Street & Saddleback Road N Ent
"4566", // != 116 Street & 30 Avenue
"4245", // != 112 Street & 29A Avenue
"4088", // == 112 Street & Saddleback Road North Ent
"4214", // Century Park Transit Centre
})) //
.compileBothTripSort());
map2.put(44l, new RouteTripSpec(44l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, TWIN_BROOKS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4265",
/* + */"4233"/* + */, //
"4204", "4210", "4362", "2204" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2204", "4198", "4204",
/* + */"4348"/* + */, //
"4265" })) //
.compileBothTripSort());
map2.put(45l, new RouteTripSpec(45l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4207", "4588", "2214" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2214", "2888", "4198", "4207" })) //
.compileBothTripSort());
map2.put(46l, new RouteTripSpec(46l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HARRY_AINLAY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, YELLOWBIRD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4290", "4209", "4307" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4490", "4208", "4290" })) //
.compileBothTripSort());
map2.put(47l, new RouteTripSpec(47l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ALLARD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"9301", // Allard Boulevard & Alexander Way SW
"9163", // Callaghan Drive & Callaghan Point
"4548", // == 111 Street & 23 Avenue
"4214", // != Century Park Transit Centre
"4206" // != Century Park Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"4206", // != Century Park Transit Centre
"4214", // != Century Park Transit Centre
"4456", // == 111 Street & 23 Avenue
"9164", // Callaghan Drive & Callaghan Close
"9301" // Allard Boulevard & Alexander Way SW
})) //
.compileBothTripSort());
map2.put(48l, new RouteTripSpec(48l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BLACKBURNE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "9226",
/* + */"4002"/* + */, //
"4204" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4204",
/* + */"9551"/* + */, //
"9226" })) //
.compileBothTripSort());
map2.put(49l, new RouteTripSpec(49l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BLACKMUD_CRK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "9756", "9542", "4210" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4210",
/* + */"4105"/* + */, //
"9756" })) //
.compileBothTripSort());
map2.put(50l, new RouteTripSpec(50l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2210", "4277", "2517", "2957", "2710" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2710", "2510", "2924", "4474", "2210" })) //
.compileBothTripSort());
map2.put(51l, new RouteTripSpec(51l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, PARKALLEN) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"2795", // 112 Street & 65 Avenue nearside
"2752", // ==
"2982", // !=
"22354", // !=
"2638", // ==
"2890", // 114 Street & 89 Avenue
"2001", // University Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"2001", // University Transit Centre
/* + */"2889"/* + */, //
"2795", // 112 Street & 65 Avenue nearside
})) //
.compileBothTripSort());
map2.put(52l, new RouteTripSpec(52l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"2212", // Southgate Transit Centre
"2887", //
"2849", // 104 Street & 81 Avenue
"2632", //
"2162", // ==
"1425", // >>>>>>
"-1425", // !=
"1728", //
"1991", //
"1308", // Government Transit Centre
"1794", // ==
"1769", // !=
"1693", // !=
"1711", // !=
"1271", // !=
"1777",// ==
"1777", // 103 Street & Jasper Avenue
"11321", //
"1292", // 100 Street & 102A Avenue
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"1292", // 100 Street & 102A Avenue
"1262", //
"1620", // ==
"1673", // !=
"1964", // !=
"1949", // !=
"1708", // !=
"1941", // ==
"1305", // Government Transit Centre
"1792", //
"1629", //
"1993", //
"-1425", // !=
"1425", // <<<<<<<
"1567", // ==
"2768", //
"2899", //
"2821", // 104 Street & 82 Avenue
"2665", //
"2212" // Southgate Transit Centre
})) //
.compileBothTripSort());
map2.put(53l, new RouteTripSpec(53l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2216", "2973", "2712" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2712", "2511", "2216" })) //
.compileBothTripSort());
map2.put(54l, new RouteTripSpec(54l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2710", "2891", "2001" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2001", "2821", "2710" })) //
.compileBothTripSort());
map2.put(55l, new RouteTripSpec(55l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2202", "2830", "2709" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2709", "2966", "2202" })) //
.compileBothTripSort());
map2.put(57l, new RouteTripSpec(57l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"2002", // University Transit Centre
"2860", // ++
"2824", // ++
"1246", // ++
"1364", // ++
"1358", // 99 Street & 104 Avenue
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"1358", // 99 Street & 104 Avenue
"1608", // ++
"2659", // ++
"2891", // ++
"2890", // 114 Street & 89 Avenue
"2002", // University Transit Centre
})) //
.compileBothTripSort());
map2.put(59l, new RouteTripSpec(59l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_EDM_COMMON) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "3440", "3003", "3209" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3209", "3007", "3440" })) //
.compileBothTripSort());
map2.put(60l, new RouteTripSpec(60l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3210", "3585", "2104", "2101", "1780", "1989" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1824", "1780", "2104", "3233", "3210" })) //
.compileBothTripSort());
map2.put(61l, new RouteTripSpec(61l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3211", "3585", "2105", "2104", "1780", "1989" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1824", "1780", "2105", "3529", "3211" })) //
.compileBothTripSort());
map2.put(62l, new RouteTripSpec(62l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHWOOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"3154", // Mill Woods Road E & 20 Avenue
"3128", // !=
"3126", // ==
"3212", // Mill Woods Transit Centre
"3127", // ==
"3087", // !=
"1989", // 108 Street & 104 Avenue
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"1824", // 108 Street & 104 Avenue
"3090", // !=
"3126", // ==
"-33219", // !=
"3203", // Mill Woods Transit Centre
"3127", // ==
"3129", // !=
"3154", // Mill Woods Road E & 20 Avenue
})) //
.compileBothTripSort());
map2.put(63l, new RouteTripSpec(63l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHWOOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"3143", // 48 Street & Mill Woods Road S
"3165", // ++
"3167", // ???
"3169", // ???
"3171", // ???
"3173", // ???
"3254", // ???
"3148", // ???
"3146", // ???
"3144", // ???
"3142", // ???
"3140", // ???
"3065", // ???
"3067", // ???
"3069", // ???
"3071", // ???
"3073", // ???
"3075", // ???
"3077", // ???
"3079", // ???
"3081", // ???
"3083", // ???
"3085", // ???
"3130", // ???
"3128", // !=
"3126", // ==
"3204", // == Mill Woods Transit Centre
"3212", // != Mill Woods Transit Centre
"3127", // ==
"3087", // !=
"1358", // 99 Street & 104 Avenue
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"1358", // 99 Street & 104 Avenue
"3090", // !=
"3126", // ==
"3204", // == Mill Woods Transit Centre
"3127", // ==
"3129", // !=
"3141", // !=
"3143", // 48 Street & Mill Woods Road S
})) //
.compileBothTripSort());
map2.put(64l, new RouteTripSpec(64l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KNOTTWOOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3026", "3006", "3001", "3208", "2111", //
"1246", "1609", "1364", //
"1358" //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1358", //
"1609", "1570", "1608", //
"2112", "3208", "3009", "3026" //
})) //
.compileBothTripSort());
map2.put(65l, new RouteTripSpec(65l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KNOTTWOOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3023", "3006", "3001", "3208", "2111", "1989" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1824", "2112", "3208", "3009", "3023" })) //
.compileBothTripSort());
map2.put(66l, new RouteTripSpec(66l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3213", "3011", "2101", "2105", //
"1246", "1609", "1364", //
"1358" //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1358", //
"1609", "1570", "1608", //
"2101", "3011", "3003", "3213" //
})) //
.compileBothTripSort());
map2.put(67l, new RouteTripSpec(67l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MEADOWS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "3206", "3952", "3957", "3708" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3708", "3957", "3950", "3311", "3116", "3206" })) //
.compileBothTripSort());
map2.put(68l, new RouteTripSpec(68l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3202", "3399", "3586", "2107", "2110", "1989" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1824", "2107", "3230", "3584", "3202" })) //
.compileBothTripSort());
map2.put(69l, new RouteTripSpec(69l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"3214", // Mill Woods Transit Centre
"3695", // ==
"3400", //
"3506", // !=
"3702", // == Meadows Transit Centre
"3705", // == Meadows Transit Centre
"3124", //
"3722", // !=
"2024", // !=
"2110", // Millgate Transit Centre => MILL_WOODS_TC
"2107", // Millgate Transit Centre => DOWNTOWN
"2026", // ++
"1989" // 108 Street & 104 Avenue
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"2110", // Millgate Transit Centre
"2371", // !=
"3953", // !=
"3710", // == Meadows Transit Centre
"3611", //
"3653", // !=
"3411", // ==
"3214" // Mill Woods Transit Centre
})) //
.compileBothTripSort());
map2.put(70l, new RouteTripSpec(70l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"3201", // Mill Woods Transit Centre
"2697", // == 99 Street & 82 Avenue
"2659", // != 99 Street & 82 Avenue STOP
"2824", // != 99 Street & 83 Avenue CONTINUE
"1190", // == McDougall Hill & Grierson Hill
"1262", // != 100 Street & Jasper Avenue
"1292", // != 100 Street & 102A Avenue
"1457", // != 100 Street & Jasper Avenue
"1780", // != 103 Street & 102 Avenue
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"1292", // != 100 Street & 102A Avenue
"1780", // != 103 Street & 102 Avenue
"1322", // != 103 Street & Jasper Avenue
"1336", // != 101 Street & Jasper Avenue
"1542", // == 100 Street & Jasper Avenue
"2878", // != 99 Street & 85 Avenue
"2659", // != 99 Street & 82 Avenue
"2840", // == 99 Street & 81 Avenue
"3201" // Mill Woods Transit Centre
})) //
.compileBothTripSort());
map2.put(71l, new RouteTripSpec(71l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, GOV_CTR, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"3216", // Mill Woods Transit Centre
"3337", // ++
"1153", // 106 Street & 97 Avenue
"1614", // 109 Street & 97 Avenue
"1303", // Government Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"1303", // Government Transit Centre
"1993", // 106 Street & 97 Avenue
"3543", // ++
"3216", // Mill Woods Transit Centre
})) //
.compileBothTripSort());
map2.put(72l, new RouteTripSpec(72l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, // MILLGATE
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3206", "3255", "3796", "3491", "2106", "2106", "2110", "1989" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1989", "2110", "2106", "3355", "3748", "3185", "3206" })) //
.compileBothTripSort());
map2.put(73l, new RouteTripSpec(73l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"2203", // Southgate Transit Centre
"2211", // Southgate Transit Centre
"2888", "2102", "3002", //
"3205" // Mill Woods Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"3205", // Mill Woods Transit Centre
"3010", "2109", //
"2203", // Southgate Transit Centre
"2211" // Southgate Transit Centre
})) //
.compileBothTripSort());
map2.put(74l, new RouteTripSpec(74l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "2204", "4202",
/* + */"3671"/* + */, //
"3107", "3559", "3209" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3209", "3007", "3430", "3110", "4202", "4212", "2204" })) //
.compileBothTripSort());
map2.put(77l, new RouteTripSpec(77l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "4210", "9850", "9111", "3205" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3205", "9761", "9361", "4210" })) //
.compileBothTripSort());
map2.put(78l, new RouteTripSpec(78l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "4205", "3675", "9384", "9725", "3215" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3215", "9147", "9387", "3865", "4205" })) //
.compileBothTripSort());
map2.put(79l, new RouteTripSpec(79l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "4207", "3319", "9260", "9139", "3214" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3214", "9287", "9671", "3513", "4207" })) //
.compileBothTripSort());
map2.put(80l, new RouteTripSpec(80l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "2218", "2769", "2826", "2551", "2599", "2223", "2305" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2305", "2152", "2264", "2188", "2622", "2837", "2888", /* + */"2630"/* + */, "2218" })) //
.compileBothTripSort());
map2.put(81l, new RouteTripSpec(81l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3216", "2106", "2338", //
/* + */"2697"/* + */, "2659",/* + */"2824"/* + */, //
/* + */"1246"/* + */, "1383", //
"1246", "1609", "1364", //
"1358" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1358", //
"1609", "1570", "1608", //
"1383", /* + */"2835"/* + */, //
/* + */"2878"/* + */, /* ? */"2659"/* ? */, "2840", //
"2385", "2106", "2104", "3216" })) //
.compileBothTripSort());
map2.put(82l, new RouteTripSpec(82l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3212", "2339", "2551", "1383", //
"1246", "1609", "1364", //
"1358" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1358", //
"1609", "1570", "1608", //
"1383", "2255", "2528", "3212" })) //
.compileBothTripSort());
map2.put(83l, new RouteTripSpec(83l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MEADOWS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"1383", //
"1542", // !=
"2196", // ==
"2393", // ==
"2952", // !=
"2188", // ==
"2572", // !=
"2805", //
"2911", // ==
"2536", "2235", // !=
"2362", "2136", // !=
"2078", // ==
"2034", // !=
"2143", // ==
"2286", // !=
"2943", "2813", // !=
"2431", // ==
"2468", // ==
"2415", // !=
"2693", "2259", // ==
"22189", // !=
"3706" //
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"3706", //
"22349", // !=
"22188", // ==
"2693", "2259", // ==
"22178", // !=
"2389", // ==
"2148", "2913", // !=
"2357", "2598", // !=
"2802", // ==
"2804", "2551", //
"2329", // !=
"2196", // ==
"1457", // !=
"1383" //
})) //
.compileBothTripSort());
map2.put(84l, new RouteTripSpec(84l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2111", "2303" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2303", "2112" })) //
.compileBothTripSort());
map2.put(85l, new RouteTripSpec(85l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"1383", //
"2434", // ==
"2059", // !=
"2985", "2560", // 1=
"2379", // ==
"2307" //
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"2307", "1383", //
})) //
.compileBothTripSort());
map2.put(86l, new RouteTripSpec(86l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1358", //
"1609", "1570", "1608", //
"2073", "2302" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2302", "2442", //
"1246", "1609", "1364", //
"1358" })) //
.compileBothTripSort());
map2.put(87l, new RouteTripSpec(87l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2106", "2338", "2824", "1383", //
"1246", "1609", "1364", //
"1358" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1358", //
"1609", "1570", "1608", //
"1383", "2285", "2385", "2106" })) //
.compileBothTripSort());
map2.put(88l, new RouteTripSpec(88l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1680", "1336", "2274", "2449", "2307" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2307", "2298", "2267", "1718" })) //
.compileBothTripSort());
map2.put(89l, new RouteTripSpec(89l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MEADOWS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, TAMARACK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"3691", // Tamarack Green & 35 Ave
"3608", // ++
"3610", // ++
"3192", // ++
"3193", // !=
"3505", // !=
"3193", // ++
"3979", // Maple Rd & Loop
"3773", // ++
"3781", // !=
"3613", // Tamarack Way & 38 Ave
"3711", // Meadows TC
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"3711", // Meadows TC
"3851", // 19 St & 35 Ave
"3605", // ++
"3691" // Tamarack Green & 35 Ave
})) //
.compileBothTripSort());
map2.put(90l, new RouteTripSpec(90l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MEADOWS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1824", "2255", "3707" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3709", "2551", "1989" })) //
.compileBothTripSort());
map2.put(91l, new RouteTripSpec(91l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHLANDS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2307",
/* + */"2425"/* + */, //
"1371" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1371", "1131", "2307" })) //
.compileBothTripSort());
map2.put(92l, new RouteTripSpec(92l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, PLYPOW, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "2101", "2118", "2876", /* + */"22330"/* + */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /*-"2876"-*//* + */"22330"/* + */, /* + */"22196"/* + */, "2118", "2101" })) //
.compileBothTripSort());
map2.put(94l, new RouteTripSpec(94l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"2002", // University Transit Centre
"2860", // ++
"2447", // ++
"2274", // ++
"2449", // ++
"2303", // Capilano Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"2303", // Capilano Transit Centre
"2298", // ++
"2591", // ++
"2159", // ++
"2891", // ++
"2752", // ==
"2982", // != 114 Street & 83 Avenue
"22354", // !=
"2638", // ==
"2002", // University Transit Centre
})) //
.compileBothTripSort());
map2.put(95l, new RouteTripSpec(95l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LAUREL, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"3213",
/* + */"3189"/* + */, //
/* + */"3952"/* + */, //
/* + */"3618"/* + */, //
/* + */"3303"/* + */, //
"3305", "3703" //
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"3703", "3303",
/* + */"3761"/* + */, //
/* + */"3620"/* + */, //
"3213" //
})) //
.compileBothTripSort());
map2.put(96l, new RouteTripSpec(96l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BONNIE_DOON, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2110", /* + */"2433"/* + */, "2196" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2196", /* + */"2074"/* + */, "2110" })) //
.compileBothTripSort());
map2.put(97l, new RouteTripSpec(97l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NAIT, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3217", "3008", "2111", "1702", "1059" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1093", "1824", "2112", "3002", "3217" })) //
.compileBothTripSort());
map2.put(98l, new RouteTripSpec(98l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NAIT, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5219", "1059" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1093", "5003" })) //
.compileBothTripSort());
map2.put(99l, new RouteTripSpec(99l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELVEDERE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2304", "1206", "7211" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7211", "1207", "2304" })) //
.compileBothTripSort());
map2.put(100l, new RouteTripSpec(100l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1243", "1812", /* + */"5449"/* + */, /* + */"5001"/* + */, "5010", "8610" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "8610", "5001", /* + */"5054"/* + */, "1083", "1256", "1243" })) //
.compileBothTripSort());
map2.put(101l, new RouteTripSpec(101l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEDGEWOOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* + */"5968"/* + */, "5908", "5821", "5002" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5002", "5979", "5660", /* + */"5968"/* + */})) //
.compileBothTripSort());
map2.put(102l, new RouteTripSpec(102l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LYMBURN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5828", "5725", "5004" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5004", "5755", "5828" })) //
.compileBothTripSort());
map2.put(103l, new RouteTripSpec(103l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAMERON_HTS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5752", "5695", "5821", "5002" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5002", "5979", "5623", "5752" })) //
.compileBothTripSort());
map2.put(104l, new RouteTripSpec(104l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LYMBURN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /*-"5755",-*/"5828", /* + */"5725"/* + */, "5821", "2706" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2706", /*-"5725"-,*//* + */"5755"/* + */,/* + */"5828"/* + */})) //
.compileBothTripSort());
map2.put(105l, new RouteTripSpec(105l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LESSARD) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* + */"5017"/* + */, /* + */"5932"/* + */, /* "-5634-", */"5733", "5821", "2706" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2706", /* "-5932-", *//* + */"5634"/* + */,/* + */"5017"/* + */})) //
.compileBothTripSort());
map2.put(106l, new RouteTripSpec(106l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LESSARD) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"5733", // 172 Street & Callingwood Road
"5650", //
"5900", "5757", "5722", "5638", "5671", "5974", "5821", "5749", "5923", "5750", //
"5463", //
"5004", // West Edmonton Mall Transit Centre END
"5007", // West Edmonton Mall Transit Centre CONTINUE
"5054", //
"5186", "5486", "5566", "5578", "5359", "5281", "5197", "5332", "5451", "5499", "5298", "4425", "22162", "2978", //
"22159", //
"2713", // South Campus Transit Centre Fort Edmonton Park
"2885", //
"22157", "2959", "2944", "2505", "2516", //
"2748", // ==
"2982", // !=
"22354", // !=
"2638", // ==
"2625", // ++
"2890", // 114 Street & 89 Avenue
"2001", // University Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"2001", // University Transit Centre
"2641", // ++
"5004", // West Edmonton Mall Transit Centre
"5733", // 172 Street & Callingwood Road
})) //
.compileBothTripSort());
map2.put(107l, new RouteTripSpec(107l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTRIDGE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5747", "5657", "5005" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5005", "5877", "5747" })) //
.compileBothTripSort());
map2.put(108l, new RouteTripSpec(108l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BRECKENRIDGE_GRNS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "8670", "8279", "8608" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "8608", "8999", "8670" })) //
.compileBothTripSort());
map2.put(109l, new RouteTripSpec(109l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5012", "5874", /* + */"5366"/* + */, "5111", /* + */"5250"/* + */, "5344", "1496" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1571", "5132", "5111", "5903", "5012" })) //
.compileBothTripSort());
map2.put(110l, new RouteTripSpec(110l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTRIDGE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5005", "5877", "5747" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5747", "5811", "5811", "5005" })) //
.compileBothTripSort());
map2.put(111l, new RouteTripSpec(111l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5001", "5795", "5109", "1620" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1620", "5104", "5846", "5001" })) //
.compileBothTripSort());
map2.put(112l, new RouteTripSpec(112l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5013", "5433", "5344", "1910",
/* + */"1824"/* + */, //
"1542", "2122", "2302" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2302", "2497", "1457",
/* + */"1989"/* + */, //
"1878", "5132", "5038", "5013" })) //
.compileBothTripSort());
map2.put(113l, new RouteTripSpec(113l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5001", "5069", "5104" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5104", "5151", "5001" })) //
.compileBothTripSort());
map2.put(114l, new RouteTripSpec(114l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTVIEW_VLG) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "8846", "8941", "5105" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5105", "8904", "8849", "8846" })) //
.compileBothTripSort());
map2.put(115l, new RouteTripSpec(115l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"5013", "5433", "5344", "5209", //
"5549", // ==
"1759", // !=
"1867", // !=
"1665", // !=
"6122", // ==
"6333", "7011" //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"7011", //
"-77862", //
"6348", //
"6369", "6289", // ==
"5173", // !=
"6372", // !=
"1932", // !=
"5090", // ==
"5203", "5132", "5038", "5013"//
})) //
.compileBothTripSort());
map2.put(117l, new RouteTripSpec(117l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, THE_HAMPTONS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"5010", // West Edmonton Mall Transit Centre
"5819", // != 189 Street & 87 Avenue
"8607", // <> Lewis Farms Transit Centre
"8536", // != West Henday Promenade Access & Webber Greens Drive
"8135", // ++ Guardian Road & Whitemud Drive
"8106", // 199 Street & 62 Avenue
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"8106", // 199 Street & 62 Avenue
"8390", // == 199 Street & Pipeline Compressor Station
"8034", // ?? 199 Street & Christian Assembly Driveway
"8430", // ?? 199 Street & Fieldstone Estates Driveway
"8361", // == 199 Street & 69 Avenue
"8033", // ++ Guardian Road & Whitemud Drive
"8406", // == != Suder Greens Drive & Webber Greens Drive
"8607", // != <> Lewis Farms Transit Centre => THE_HAMPTONS
"8605", // !=Lewis Farms Transit Centre => WEST_EDM_MALL
"5783", // == 187 Street & 87 Avenue
"5010", // West Edmonton Mall Transit Centre
})) //
.compileBothTripSort());
map2.put(118l, new RouteTripSpec(118l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RIO_TERRACE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5174", "5302", "5103" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5103", "5301", "5174" })) //
.compileBothTripSort());
map2.put(119l, new RouteTripSpec(119l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, THE_HAMPTONS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "8583", "8097", "8033", "8607" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "8607", "8135", "8097", "8046", "8583" })) //
.compileBothTripSort());
map2.put(120l, new RouteTripSpec(120l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STADIUM, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5110", "1242", "1083", "1336", "1407" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1407", "1328", "1620", "1746", "5110" })) //
.compileBothTripSort());
map2.put(121l, new RouteTripSpec(121l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5205", "5215", "6345", "6646", "7011" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7011", "6183", "6371", "5404", "5205" })) //
.compileBothTripSort());
map2.put(122l, new RouteTripSpec(122l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5012", "8389", "5928", "5330", "5207" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5207", "5143", "5389", "8952", "5012" })) //
.compileBothTripSort());
map2.put(123l, new RouteTripSpec(123l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5105", "8691", "5648", "5374", "5205" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5205", "5692", "5635", "8684", "5105" })) //
.compileBothTripSort());
map2.put(124l, new RouteTripSpec(124l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE) // MISTATIM_IND
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"5106", "6231", "5204" //
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"5204", "6781", "5106" //
})) //
.compileBothTripSort());
map2.put(125l, new RouteTripSpec(125l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELVEDERE, // DOWNTOWN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"5101", // Jasper Place Transit Centre
"5469", // !=
"5448",// 161 Street & 109 Avenue
"5127", // !=
"5202", // == Westmount Transit Centre
"5098", // !=
"11326", // ==
"1105", // == Kingsway RAH Transit Centre LAST
"1107", // Kingsway RAH Transit Centre
"1401", // Stadium Transit Centre
"1044", // ==
"1209", // == Coliseum Transit Centre LAST
"1205", // Coliseum Transit Centre
"7205", // Belvedere Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"7205", // Belvedere Transit Centre
"1357", // !=
"1209", // Coliseum Transit Centre
"1148", // !=
"1402", // Stadium Transit Centre
"1032", // !=
"1105", // == Kingsway RAH Transit Centre
"1053", // !=
"5077", // ==
"5202", // == Westmount Transit Centre LAST
"5209", // Westmount Transit Centre
"5112", // !=
"5101", // Jasper Place Transit Centre
})) //
.compileBothTripSort());
map2.put(126l, new RouteTripSpec(126l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5013", "8882", "8590", "5928", "5208" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5208", "5389", "8500", "8952", "5013" })) //
.compileBothTripSort());
map2.put(127l, new RouteTripSpec(127l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELVEDERE, // 7205
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT) // 5204
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5204", "1110", "1401", "1209", "1205", "7205" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7205", "1209", "1402", "1110", "1105", "5204" })) //
.compileBothTripSort());
map2.put(128l, new RouteTripSpec(128l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY) //
.addTripSort(MDirectionType.NORTH.intValue(), // CASTLE_DOWNS
Arrays.asList(new String[] { //
"2890", // 114 Street & 89 Avenue
"2002", // University Transit Centre
"2638", // 114 Street & 85 Avenue
"5206", // Westmount Transit Centre
"6191", // !=
"6333", // <> 127 Street & 129 Avenue
"6553", // !=
"6458", // !=
"6006", // Castle Downs Transit Centre END >> UNIVERSITY
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), // UNIVERSITY
Arrays.asList(new String[] { //
"6006", // Castle Downs Transit Centre
"6137", // !=
"6366", // ++ 127 Street & 131 Avenue
"6333", // <> 127 Street & 129 Avenue
"6435", // !=
"6369", // 127 Street & 129 Avenue
"6289", // ++
"2890", // 114 Street & 89 Avenue
})) //
.compileBothTripSort());
map2.put(129l, new RouteTripSpec(129l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5012", "8740", "8740", "5960", "5208" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5208", "5936", "8740", "5012" })) //
.compileBothTripSort());
map2.put(130l, new RouteTripSpec(130l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"2002", // University Transit Centre
"1700", // ++
"1532", // 106 Street & 118 Avenue Loop
"1476", // ++
"7002", // Northgate Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"7002", // Northgate Transit Centre
"1532", // 106 Street & 118 Avenue Loop
"1855", // ++
"2890", // 114 Street & 89 Avenue
"2002", // University Transit Centre
})) //
.compileBothTripSort());
map2.put(133L, new RouteTripSpec(133L, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY, // S_CAMPUS_FT_EDM
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"8606", // Lewis Farms Transit Centre
"8602", // Lewis Farms Transit Centre
"5001", // West Edmonton Mall Transit Centre
"2748", // ==
"2982", // !=
"22354", // !=
"2638", // ==
"2890", // 114 Street & 89 Avenue
"2002", // University Transit Centre
"2890", // 114 Street & 89 Avenue
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"2890", // 114 Street & 89 Avenue
"2002", // University Transit Centre
"5010", // West Edmonton Mall Transit Centre
"8602", // Lewis Farms Transit Centre
"8606", // Lewis Farms Transit Centre
})) //
.compileBothTripSort());
map2.put(134l, new RouteTripSpec(134l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "1251", "1237", "7002" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7002", "1372", "1251" })) //
.compileBothTripSort());
map2.put(136L, new RouteTripSpec(136L, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, THE_HAMPTONS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"8583", // 215 Street & Hope Road
"8089", // Glastonbury Boulevard & 69 Avenue
"8033", // ++
"8602", // ++
"5010", // West Edmonton Mall Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"5010", // West Edmonton Mall Transit Centre
"8609", // ++
"8135", // ++
"8177", // ++
"8046", // 199 Street & 62 Avenue
"8583", // 215 Street & Hope Road
})) //
.compileBothTripSort());
map2.put(137l, new RouteTripSpec(137l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5010", "8882", "6850", /* + */"7011" /* + */, "7002", "7908" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7908", "7011", "6118", "8861", "5010" })) //
.compileBothTripSort());
map2.put(138l, new RouteTripSpec(138l, // TODO not exactly: same loop for the 2 trips
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEDGEWOOD) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /*-"5627"-*//* + */"5968"/* + */, /* + */"5888"/* + */, /* + */"5789"/* + */, //
"5983", "5747", "2707" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2707", "5747", "5719",//
/* + */"5627"/* + */, /* + */"5858"/* + */, /* + */"5968"/* + *//*-"5789"-*/})) //
.compileBothTripSort());
map2.put(139l, new RouteTripSpec(139l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, THE_HAMPTONS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "8457", "8106", "8033", "2707" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2707", "8135", "8457", "8460" })) //
.compileBothTripSort());
map2.put(140l, new RouteTripSpec(140l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LAGO_LINDO, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "1251", "1040", "7003", "7010",
/* + */"7748"/* + */, //
"7377" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7377",
/* + */"7042"/* + */, //
"7003", "1380", "1251" })) //
.compileBothTripSort());
map2.put(141l, new RouteTripSpec(141l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, COLISEUM) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1204", "1561", "1002", "1003" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1002", "1003", "1031", "1204" })) //
.compileBothTripSort());
map2.put(142l, new RouteTripSpec(142l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, COLISEUM) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1207", "1521", "1001" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1001", "1367", "1207" })) //
.compileBothTripSort());
map2.put(143l, new RouteTripSpec(143l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MONTROSE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, KINGSWAY) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1111", "1476", "1441", "1205", "1260" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1260", "1213", "1278", "1075", "1111" })) //
.compileBothTripSort());
map2.put(145l, new RouteTripSpec(145l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LAGO_LINDO, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, _82_ST_132_AVE) // EAUX_CLAIRES
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"6315", "7377",
/* + */"7388"/* + */
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
/* + */"7388"/* + */, //
/* + */"7483"/* + */, //
"6315", "6317", "7358", "7165" //
})) //
.compileBothTripSort());
map2.put(149l, new RouteTripSpec(149l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "6308", "7736", "7113", "7904" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7904", "7153", "7959", "6308" })) //
.compileBothTripSort());
map2.put(150l, new RouteTripSpec(150l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"5007", "5107", //
"5207", //
"5549", // ==
"1759", // !=
"1867", // !=
"1665", // !=
"6122", // ==
"6333", "7011", "7010", "6303" //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"6303", "7011", //
"-77862", //
"6369", //
"6289", // ==
"5173", // !=
"6372", // !=
"1932", // !=
"5090", // ==
"5203", "5102", "5007" //
})) //
.compileBothTripSort());
map2.put(151l, new RouteTripSpec(151l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KING_EDWARD_PK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { // CASTLE_DOWNS
"2253", // 71 Street & 77 Avenue
"2432", // 91 Street & 82 Avenue
"1251", // == 102 Street & MacDonald Drive
"1346", // 101 Street & 101A Avenue
"1237", // 101 Street & 117 Avenue
"1043", // != 97 St & Yellowhead Tr Nearside
"6496", // == 97 Street & 128 Avenue
"6421", // != 102 Street & 127 Avenue
"6571", // ==
"6333", // !=
"6553", // !=
"6020", // !=
"6434", // !=
"6292", // != 127 Street & 129 Avenue LAST
"6328", // !=
"6132", // !=
"6487", // ==
"6333", // 127 Street & 129 Avenue
"6004", // Castle Downs Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { // KING_EDWARD_PK
"6004", // Castle Downs Transit Centre
"6366", // !=
"6292", // 127 Street & 129 Avenue
"6123", // !=
"6116", // == 103 Street & 127 Avenue
"6496", // == 97 Street & 128 Avenue LAST
"6266", // 101 Street & 128 Avenue
"1372", // 101 Street & 117 Avenue
"1243", // == 101 Street & 101A Avenue
"1251", // == 102 Street & MacDonald Drive LAST
"1142", // 101 Street & MacDonald Drive nearside CONTINUE
"2079", // 91 Street & 83 Avenue
"2253", // 71 Street & 77 Avenue
})) //
.compileBothTripSort());
map2.put(152l, new RouteTripSpec(152l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELVEDERE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7003", "7074", "7208" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7208", "7221", "7003" })) //
.compileBothTripSort());
map2.put(153l, new RouteTripSpec(153l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELVEDERE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7008", /* + */"7143"/* + */, "7204" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7204", /* + */"7043"/* + */, "7008" })) //
.compileBothTripSort());
map2.put(154l, new RouteTripSpec(154l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELVEDERE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7009", "7592", "7202" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7202", "7123", "7009" })) //
.compileBothTripSort());
map2.put(155l, new RouteTripSpec(155l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, RAPPERSWILL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "6823", /* + */"6416"/* + */, "6313" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "6313", /* + */"6078"/* + */, "6823" })) //
.compileBothTripSort());
map2.put(157l, new RouteTripSpec(157l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, REMAND_CTR) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "6379",
/* + */"6077"/* + */, //
"6302" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "6302",
/* + */"6720"/* + */, //
"6379" })) //
.compileBothTripSort());
map2.put(160l, new RouteTripSpec(160l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, OXFORD, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, GOV_CTR) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "1304", "1820", "6348", "6243", "6835", "6676" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "6835", "6676", "6442", "6594", "1304" })) //
.compileBothTripSort());
map2.put(161l, new RouteTripSpec(161l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MAC_EWAN_GOV_CTR) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"1309", // != Government Transit Centre START
"1035", // !=
"1824", // != 108 Street & 104 Avenue START
"1845", // !=
"1271", // ==
"7579", // !=
"7009", // <> Northgate Transit Centre
"66112", // !=
"6580", // ++
"6007", // Castle Downs Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"6007", // Castle Downs Transit Centre
"6396", // ++
"6141", // == !=
"7009", // != <> Northgate Transit Centre => CASTLE_DOWNS
"7003", // != Northgate Transit Centre
"1673", // ==
"1740", // !=
"1989", // != 108 Street & 104 Avenue END
"1622", // !=
"1309", // !≃ Government Transit Centre END
})) //
.compileBothTripSort());
map2.put(162l, new RouteTripSpec(162l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MAC_EWAN_GOV_CTR) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"1309", // != Government Transit Centre START
"1035", // !=
"1824", // != 108 Street & 104 Avenue START
"1845", // !=
"1271", // ==
"7579", // !=
"6311", // <> Eaux Claires Transit Centre
"6033", // ++
"6008", // Castle Downs Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"6008", // Castle Downs Transit Centre
"6340", // ++
"6362", // ==
"6311", // != <> Eaux Claires Transit Centre => CASTLE_DOWNS
"6310", // != Eaux Claires Transit Centre
"1622", // ==
"1740", // !=
"1989", // != 108 Street & 104 Avenue END
"1964", // !=
"1309", // != Government Transit Centre END
})) //
.compileBothTripSort());
map2.put(163l, new RouteTripSpec(163l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CHAMBERY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"6312", /* + */"7463"/* + */, /* + */"7748"/* + */, //
/* + */"7381"/* + */, "6194", /* + */"6767"/* + */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
/* + */"6767"/* + */, "6598", /* + */"6854"/* + */, /* + */"6147"/* + */, /* + */"6362"/* + */, //
/* + */"6074"/* + */, /* + */"6076"/* + */, "6236", /* + */"7482"/* + */, "6312" })) //
.compileBothTripSort());
map2.put(164l, new RouteTripSpec(164l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RAPPERSWILL, // CANOSSA, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"7015", // == Northgate Transit Centre
"66112", // !=
"6612", // !=
"6148", // !=
"6235", // !=
"6468", // ++
"6356", // ==
"6001", // Castle Downs Transit Centre
"6783", // ==
"6949", // !=
"6205", // <> 115 Street & 175 Avenue
"6950", // !=
"6202" // 127 Street & 167 Avenue
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"6202", // 127 Street & 167 Avenue
"6871", // !=
"6105", // !=
"6205", // <>
"6338", // !=
"6184", // ==
"6575", // ==
"6340", // !=
"6584", // !=
"6077", // !=
"6236", // !=
"6021", // !=
"6225", // ==
"6010", // Castle Downs Transit Centre
"6101", // ==
"6478", // !=
"6588", // == Griesbach Road & 146 Avenue
"6404", // != Sir Arthur Currie Way & Greisbach Road
"6141", // != 102 Street & 137 Avenue
"6125", // != 104 Street & Griesbach Road
"6361", // != 97 Street & 144 Avenue
"7015", // == Northgate Transit Centre
})) //
.compileBothTripSort());
map2.put(165l, new RouteTripSpec(165l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, _85_ST_132_AVE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "7496", "6130", "6522", "6011", "6127" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "6074", "6010", "6396", "6579", "7299" })) //
.compileBothTripSort());
map2.put(166l, new RouteTripSpec(166l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, GRIESBACH) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"6112", // Pegasus Boulevard & Stan Walters Avenue
/* + */"6612"/* + */, //
"7015" // Northgate Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"7015", // Northgate Transit Centre
/* + */"6260"/* + */, //
"6112" // Pegasus Boulevard & Stan Walters Avenue
})) //
.compileBothTripSort());
map2.put(167l, new RouteTripSpec(167l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS + SLASH + _82_ST, // Castle Downs-82 St
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, _95_ST_132_AVE) // 95A Street & 132 Avenue
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
/* no stops */
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"6039", // 100 Street & 158 Avenue
"6317", // Eaux Claires Transit Centre
"7353", // 87 Street & 144 Avenue
"7060", // 95A Street & 132 Avenue
})) //
.compileBothTripSort());
map2.put(168l, new RouteTripSpec(168l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "7011", "6243", "6619", "6835", //
"6725", //
"6003", "6305" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "6305", "6011", //
/* + */"6228"/* + */, //
/* + */"6698"/* + */, //
/* + */"6725"/* + */, //
/* + */"6256"/* + */, //
/* + */"6566"/* + */, //
/* + */"6261"/* + */, //
/* + */"6114"/* + */, //
"6676", "6853", "6442", "7011" })) //
.compileBothTripSort());
map2.put(169l, new RouteTripSpec(169l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CANOSSA, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"7015", // == Northgate Transit Centre
"7448", "6286", // !=
"6148", "6468", // !=
"6356", // ==
"6001", "6166", "6194", //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"6194", "6456", "6010", //
"6101", // ==
"6478", "6343", // !=
"6460", "6536", // !=
"6361", // ==
"7015", // Northgate Transit Centre
})) //
.compileBothTripSort());
map2.put(180l, new RouteTripSpec(180l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"1824", "6304", "7736", "7456", "7207", "7642", "1002" //
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1002", "7639", "7203", "7384", "7959",
/* + */"6304"/* + */, //
"6317",
/* + */"6594"/* + */, //
/* + */"1850"/* + */, //
"1989" //
})) //
.compileBothTripSort());
map2.put(181l, new RouteTripSpec(181l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELVEDERE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7206", //
/* + */"7650"/* + */, //
/* + */"7186"/* + */, //
"7384", "7241", "7604", "7901" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7901", "7414", "7400", "7456", //
/* + */"7164"/* + */, //
/* + */"7479"/* + */, //
/* + */"7650"/* + */, //
/* + */"7265"/* + */, //
/*-"7186",-*///
"7206" })) //
.compileBothTripSort());
map2.put(182l, new RouteTripSpec(182l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, FRASER, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7003", "7186", "7104", "7470" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7470", "7105", "7572", "7003" })) //
.compileBothTripSort());
map2.put(183l, new RouteTripSpec(183l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "1002", "7668", "7885", "7102" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7102", "7983", "7729", "1002" })) //
.compileBothTripSort());
map2.put(184l, new RouteTripSpec(184l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EVERGREEN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "7903", "7262", "7128" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7128", "7262", "7903" })) //
.compileBothTripSort());
map2.put(185l, new RouteTripSpec(185l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "1002",
/* + */"7954"/* + */, //
"7102" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7102",
/* + */"7744"/* + */, //
"1002" })) //
.compileBothTripSort());
map2.put(186l, new RouteTripSpec(186l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, FRASER, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7358", "7286", "7206", "7104", "7470" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7470", "7105", "7205", "7120", "7011" })) //
.compileBothTripSort());
map2.put(187l, new RouteTripSpec(187l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, KERNOHAN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LONDONDERRY) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7230", "7103", "7756", "7943" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7943", "7103", "7102", "7185" })) //
.compileBothTripSort());
map2.put(188l, new RouteTripSpec(188l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LONDONDERRY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "6309", "7230", "7186", "7907", "7729" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7668", "7904", "7549", "7185", "7188", "6309" })) //
.compileBothTripSort());
map2.put(190l, new RouteTripSpec(190l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "6306", "7763", "7803", "7054", "7906" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7906", "7384", "7815", "7674", "6306" })) //
.compileBothTripSort());
map2.put(191l, new RouteTripSpec(191l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KLARVATTEN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "6307", //
/* + */"7865"/* + */, //
"7827" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
/* + */"7827"/* + */, //
/* + */"7825"/* + */, //
"7434", //
/* + */"7795"/* + */, //
"7779", "6307" })) //
.compileBothTripSort());
map2.put(192l, new RouteTripSpec(192l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BRINTNELL, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "7909",
/* + */"7512"/* + */, //
"7984" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7984",
/* + */"7603"/* + */, //
"7909" })) //
.compileBothTripSort());
map2.put(193l, new RouteTripSpec(193l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BRINTNELL, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "7910",
/* + */"7992"/* + */, //
"7414" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7414",
/* + */"77280"/* + */, //
"7910" })) //
.compileBothTripSort());
map2.put(194l, new RouteTripSpec(194l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SCHONSEE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "6308", "7677", "7919" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7919", "7674", "6308" })) //
.compileBothTripSort());
map2.put(195l, new RouteTripSpec(195l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_CONACHIE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"7907", // West Clareview Transit Centre
"7879", // ++
"7308" // 59A Street & McConachie Way
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"7308", // 59A Street & McConachie Way
"77335", // ==
"77428", // !=
"7018", // McConachie Boulevard & 176 Avenue
"77607", // !=
"77424", // ==
"77436", // ==
"7907", // West Clareview Transit Centre
})) //
.compileBothTripSort());
map2.put(197l, new RouteTripSpec(197l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EDM, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SPRUCE_GRV) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "8737", "8785", "8761", "5415", //
/* + */"1595"/* + */, //
"1223", "1850", "1479" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1570", "1679", "1227", //
/* + */"1187"/* + */, //
"5389", "8730", "8743", "8737" })) //
.compileBothTripSort());
map2.put(198l, new RouteTripSpec(198l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, FT_SASKATCHEWAN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7908",
/* + */"77175"/* + */, //
"7405" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7405", "7926", "7908" })) //
.compileBothTripSort());
map2.put(199l, new RouteTripSpec(199l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EDM_GARRISON, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "6316",
/* + */"7873"/* + */, //
"7895" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7895", "7873", "6316" })) //
.compileBothTripSort());
map2.put(211l, new RouteTripSpec(211l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW_WEST_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1643", "1321", "7903" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7903", "1620", "1643" })) //
.compileBothTripSort());
map2.put(301l, new RouteTripSpec(301l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"4204", // Century Park Transit Centre
"4065", "4547", "4186", //
"2203", // Southgate Transit Centre
"2211" // Southgate Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"2203", // Southgate Transit Centre
"2211", // Southgate Transit Centre
"4275", "4543", "4443", //
"4204", // Century Park Transit Centre
})) //
.compileBothTripSort());
map2.put(302l, new RouteTripSpec(302l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EVERGREEN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "7103",
/* + */"7689",/* + *///
"7262", "7654", "7128" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7128", "7654", "7591",
/* + */"7855",/* + *///
"7103" })) //
.compileBothTripSort());
map2.put(303l, new RouteTripSpec(303l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MISTATIM_IND) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"7011", // Northgate Transit Centre
"6348", // ==
"6472", // ><
"6930", // ><
"6484", // ==
"6233", // ==
"6183", // 142 Street & 134 Avenue
"6727" // 159 Street & 131 Avenue Nearside
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"6727", // 159 Street & 131 Avenue Nearside
"6677", // ==
"66139", // !=
"6178", // ==
"6524", // ++
"6472", // ><
"6930", // ><
"7579", // ++
"7011" // Northgate Transit Centre
})) //
.compileBothTripSort());
map2.put(304l, new RouteTripSpec(304l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHPARK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4569",
/* + */"2076",/* + *///
"2218" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2218", "2888",
/* + */"4183",/* + *///
"4569" })) //
.compileBothTripSort());
map2.put(305l, new RouteTripSpec(305l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_GATES) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5668", "5082", "5528", "", "5208", "5214" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5214", "1481", "1861", "5205", "5055", "5335", "5668" })) //
.compileBothTripSort());
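		// A minimal, illustrative sanity check for these hard-coded lists (not part of the original parser;
		// the method name and its wiring are hypothetical). It only assumes java.util.List/String and would flag
		// blank entries and consecutively repeated stop codes in lists like the ones above.
		// private static void checkStopList(long routeId, java.util.List<String> stopIds) {
		// 	String previous = null;
		// 	for (String stopId : stopIds) {
		// 		if (stopId == null || stopId.trim().isEmpty()) {
		// 			throw new IllegalStateException("Empty stop ID in trip sort list of route " + routeId + "!");
		// 		}
		// 		if (stopId.equals(previous)) {
		// 			System.out.printf("Route %d: stop %s repeated consecutively in trip sort list.%n", routeId, stopId);
		// 		}
		// 		previous = stopId;
		// 	}
		// }
		// Usage (hypothetical): checkStopList(305L, Arrays.asList("5668", "5082", "5528", "5208", "5214"));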
map2.put(306l, new RouteTripSpec(306l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MEADOWS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BONNIE_DOON) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"2808", // Bonnie Doon Safeway
"2805", // ++ Girard Road & 76 Avenue
"2415", // !=
"2693", // == 17 Street & Oak Ridge Drive
"2259", // ==
"22189", // !=
"3706" // Meadows Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"3706", // Meadows Transit Centre
"22188", // !=
"2693", // == 17 Street & Oak Ridge Drive
"2259", // ==
"22178", // !=
"2804", // ++
"2159", // ++
"2808", // Bonnie Doon Safeway
})) //
.compileBothTripSort());
map2.put(307l, new RouteTripSpec(307l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, GOLD_BAR, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BONNIE_DOON) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "2196", "2304", "2012",
/* + */"2068"/* + */, //
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] {
/* + */"2068"/* + */, //
"2475", "2305", "2196" //
})) //
.compileBothTripSort());
map2.put(308l, new RouteTripSpec(308l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, RIVERDALE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"1123",
/* + */"1280"/* + */, //
/* + */"1549"/* + */, //
"1893" //
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"1893",
/* + */"1510"/* + */, //
/* + */"1953"/* + */, //
/* + */"1914"/* + */, //
"1254",
/* + */"1498"/* + */, //
/* + */"1120"/* + */, //
"1262", "1123" //
})) //
.compileBothTripSort());
map2.put(309l, new RouteTripSpec(309l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KINGSWAY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RIVERDALE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"1254", // 92 Street & 101A Avenue
"1620", // == 101 Street & Jasper Avenue
"1673", // != 103 Street & Jasper Avenue
"1964", // != 107 Street & Jasper Avenue
"1949", // != 103 Street & 100 Avenue
"1708", // != 105 Street & 100 Avenue
"1941", // == 107 Street & 100 Avenue
"1705", // !=
"1293", // <> 110 Street & 100 Avenue
"1961", // 1=
"1942", // ++
"1960", // ++
"1978", // ++
"1104", // ++
"1366" // 101 Street & 111 Avenue
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"1366", // 101 Street & 111 Avenue
"1455", // Kingsway Mall
"1834", // ++
"1141", // ++
"1856", // !=
"1293", // == <> 110 Street & 100 Avenue
"1711", // != 107 Street & 100 Avenue
"1271", // != 105 Street & Jasper Avenue
"1769", // != 107 Street & 100 Avenue
"1299", // != 103 Street & Jasper Avenue
"1322", // == 103 Street & Jasper Avenue
"1256", // Thornton Court & Jasper Avenue
"1893", // ++
"1254", // 92 Street & 101A Avenue
})) //
.compileBothTripSort());
map2.put(310l, new RouteTripSpec(310l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RIO_TERRACE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5174", "5302", "5383", "5105" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5105", "5491", "5301", "5174" })) //
.compileBothTripSort());
map2.put(311l, new RouteTripSpec(311l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5011", "5222", "5836", "5105" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5105", "5851", "5325", "5011" })) //
.compileBothTripSort());
map2.put(312l, new RouteTripSpec(312l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LONDONDERRY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7008", "7754", "7944" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7944", "7754", "7008" })) //
.compileBothTripSort());
map2.put(313L, new RouteTripSpec(313L, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, PARKALLEN) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"2795", // 112 Street & 65 Avenue nearside
"2689", // ++
"2002", // University Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"2002", // University Transit Centre
"2971", // 117 Street & University Avenue
"2001", // University Transit Centre
"2690", // ++
"2795", // 112 Street & 65 Avenue nearside
})) //
.compileBothTripSort());
map2.put(315L, new RouteTripSpec(315L, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WINTERBURN_IND, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"8609", // Lewis Farms Transit Centre
"8536", // ==
"8087", // !=
"8175", // <> 215 Street & Secord Boulevard
"8146", // <>
"8123", // <>
"8066", // <> 217 Street & 94B Avenue
"8080", // <> Secord Drive & Secord Boulevard
"8061", // <> 218 Street & Secord Blvd
"8078", // !=
"8694", // !=
"8163", // <> 215 St & Westview Blvd
"8955", // <>
"8938", // <>
"8989", // <> Lakeview Drive & Westview Boulevard
"8975", // <> Westview Village & Lakeview Drive
"8144", // !=
"8727", // 220 Street & 115 Avenue
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"8727", // 220 Street & 115 Avenue
"8369", // !=
"8163", // <> 215 St & Westview Blvd
"8955", // <>
"8938", // <>
"8989", // <> Lakeview Drive & Westview Boulevard
"8975", // <> Westview Village & Lakeview Drive
"8945", // !=
"8065", // !=
"8175", // <> 215 Street & Secord Boulevard
"8146", // <>
"8123", // <>
"8066", // <> 217 Street & 94B Avenue
"8080", // <> Secord Drive & Secord Boulevard
"8061", // <> 218 Street & Secord Blvd
"8068", // !=
"8609", // Lewis Farms Transit Centre
})) //
.compileBothTripSort());
map2.put(316l, new RouteTripSpec(316l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HAWKS_RDG, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "8603", "6824", "6408", "6709" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "6709", "6617", "6825", "8603" })) //
.compileBothTripSort());
map2.put(317l, new RouteTripSpec(317l, // TODO better (same stops in both trips in different orders)
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WINTERBURN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"8989", "8967", "8943", "8975", "8927", "8163", "8846", "8975", "8945", //
"8941", "5105" //
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"5105", "8904", //
"8694", "8927", "8163", "8846", "8975", "8927", "8163", "8955", "8938", "8989" //
})) //
.compileBothTripSort());
map2.put(318l, new RouteTripSpec(318l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, COLISEUM) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1208", "1070", "1001", "1491", "1002" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1002", "1340", "1208" })) //
.compileBothTripSort());
map2.put(321l, new RouteTripSpec(321l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA_IND) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "3733", "3744", "2106" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2106",
/* + */"3481"/* + */, //
"3733" })) //
.compileBothTripSort());
map2.put(322l, new RouteTripSpec(322l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HOLYROOD, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BONNIE_DOON) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2808", "2585", "2841",
/* + */"2246"/* + */, //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] {
/* + */"2246"/* + */, //
"2613", "2808" })) //
.compileBothTripSort());
map2.put(323l, new RouteTripSpec(323l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BONNIE_DOON, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, RITCHIE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "2419", "2313", "2808" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2808",
/* + */"2294"/* + */, //
"2419" })) //
.compileBothTripSort());
map2.put(324l, new RouteTripSpec(324l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, AMBLESIDE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "9092", "9630", "4201" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "4201", "9635", "9092" })) //
.compileBothTripSort());
map2.put(325l, new RouteTripSpec(325l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WINDERMERE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "9632", "9526", "4801" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "4801",
/* + */"4938"/* + */, //
"9632" })) //
.compileBothTripSort());
map2.put(327l, new RouteTripSpec(327l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELGRAVIA) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "2765", "2680", "2821" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2821",
/* + */"2648"/* + */, //
"2765" })) //
.compileBothTripSort());
map2.put(330l, new RouteTripSpec(330l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4811", "4597", "4153", "2704", })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2704", "4021", "4494", "4811" })) //
.compileBothTripSort());
map2.put(331l, new RouteTripSpec(331l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CHAPPELLE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "9285",
/* + */"9270"/* + */, //
/* + */"9271"/* + */, //
/* + */"9272"/* + */, //
/* + */"9366"/* + */, //
/* + */"9281"/* + */, //
/* + */"9382"/* + */, //
"4216" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4216",
/* + */"9044"/* + */, //
/* + */"9187"/* + */, //
/* + */"9273"/* + */, //
/* + */"9274"/* + */, //
/* + */"9368"/* + */, //
/* + */"9263"/* + */, //
/* + */"9264"/* + */, //
/* + */"9265"/* + */, //
"9285" })) //
.compileBothTripSort());
map2.put(333l, new RouteTripSpec(333l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ROSENTHAL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "8167",
/* + */"8852"/* + */, //
"8604" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "8604",
/* + */"8168"/* + */, //
"8167" })) //
.compileBothTripSort());
map2.put(334l, new RouteTripSpec(334l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "4809",
/* + */"4626"/* + */, //
"4215" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "4215",
/* + */"4642"/* + */, //
"4809" })) //
.compileBothTripSort());
map2.put(336l, new RouteTripSpec(336l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4810", "4455", "4069", "2208", })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2208", "4167", "4129", "4810" })) //
.compileBothTripSort());
map2.put(337l, new RouteTripSpec(337l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "4802", "4117", "4110", "4215", })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "4215", "4941", "4856", "4802" })) //
.compileBothTripSort());
map2.put(338l, new RouteTripSpec(338l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BLACKBURNE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "9226", "4201", "4813", "4597", "4034", "2207", })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2207", "4034", "4042", "4805", "4204", "9226" })) //
.compileBothTripSort());
map2.put(339l, new RouteTripSpec(339l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RUTHERFORD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "9251", "9685", "4213", })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4213", "9756", "9251" })) //
.compileBothTripSort());
map2.put(340l, new RouteTripSpec(340l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"3217", // Mill Woods Transit Centre
"3122", // == Hewes Way & 27 Avenue
"3244", // != Youville Drive W & 28 Avenue
"3338", // != 65 Street & 28 Avenue
"3462", // != Youville Drive W & 28 Avenue
"3498", // != 66 Street & 31 Avenue
"3264", // == 67 Street & 28 Avenue
"3482", // ++
"2102", // Millgate Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"2102", // Millgate Transit Centre
"3448", // ++
"3217", // Mill Woods Transit Centre
})) //
.compileBothTripSort());
map2.put(347l, new RouteTripSpec(347l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ALLARD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "9717", "9685", "4213", })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4213", "9666", "9717" })) //
.compileBothTripSort());
map2.put(360l, new RouteTripSpec(360l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ORCHARDS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "9306",
/* + */"9050"/* + */, //
"4216", })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4216",
/* + */"9051"/* + */, //
"9306" })) //
.compileBothTripSort());
map2.put(361l, new RouteTripSpec(361l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3210", "3585", "2105", })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2105", "3529", "3210" })) //
.compileBothTripSort());
map2.put(362l, new RouteTripSpec(362l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHWOOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3082",
/* + */"3149"/* + */, //
"3211", })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "3211", "3009", "3082" })) //
.compileBothTripSort());
map2.put(363l, new RouteTripSpec(363l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHWOOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3066", "3003", "3215", })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "3215",
/* + */"3174"/* + */, //
"3066" })) //
.compileBothTripSort());
map2.put(370l, new RouteTripSpec(370l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3206", "3957", "3796", "2106", })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2106", "3748", "3950", "3206" })) //
.compileBothTripSort());
map2.put(380l, new RouteTripSpec(380l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, QUARRY_RDG, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7903",
/* + */"7587"/* + */, //
"7213" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7213",
/* + */"77430"/* + */, //
"7903" })) //
.compileBothTripSort());
map2.put(381l, new RouteTripSpec(381l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HOLLICK_KENYON, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "7905",
/* + */"7982"/* + */, //
"7151", })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7151",
/* + */"7808"/* + */, //
"7905" })) //
.compileBothTripSort());
map2.put(399l, new RouteTripSpec(399l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, COLISEUM, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CONCORDIA) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "1517",
/* + */"1015"/* + */, //
"1209" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1209",
/* + */"1131"/* + */, //
"1517" })) //
.compileBothTripSort());
map2.put(512l, new RouteTripSpec(512l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "1336", "1408", "1211", "7212", "7903" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7903", "7212", "1210", "1407", "1620" })) //
.compileBothTripSort());
map2.put(517l, new RouteTripSpec(517l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW_WEST_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, COLISEUM) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"1211", "7903" //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"7903", "1211" //
})) //
.compileBothTripSort());
map2.put(560l, new RouteTripSpec(560l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SPRUCE_GRV) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"5389", // 154 Street & 118 Avenue
"8730", // == Century Road & Grove Drive
"8743", // == Aspenglen Drive & Grove Drive
"8737", // == King Street & McLeod Avenue
"8785", // == Century Road & McLeod Avenue
"8761", // == Century Road & Grove Drive
"1890", // 109 Street & Princess Elizabeth Avenue
"1983", // 105 Street & 104 Avenue
"1479", // 97 Street & 103A Avenue
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"1570", // 101 Street & 103A Avenue
"1679", // 105 Street & 104 Avenue
"1860", // 109 Street & Princess Elizabeth Avenue
"8730", // == Century Road & Grove Drive
"8743", // == Aspenglen Drive & Grove Drive
"8737", // == King Street & McLeod Avenue
"8785", // == Century Road & McLeod Avenue
"8761", // == Century Road & Grove Drive
"5415", // 154 Street & 119 Avenue
})) //
.compileBothTripSort());
map2.put(561l, new RouteTripSpec(561l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NAIT, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Acheson") //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"8169", "1890" //
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"1860", "8169" //
})) //
.compileBothTripSort());
map2.put(562l, new RouteTripSpec(562l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, // WEST_EDM_MALL
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SPRUCE_GRV) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"8207", // Jennifer Heil Way & Grove Drive
"5219", // 175 Street & 87 Avenue
"2708" // South Campus Transit Centre Fort Edmonton Park
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"2708", // South Campus Transit Centre Fort
"5014", // West Edmonton Mall Transit Centre
"8207", // Jennifer Heil Way & Grove Drive
})) //
.compileBothTripSort());
map2.put(577l, new RouteTripSpec(577l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHLANDS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, _84_ST_111_AVE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1408",
/* + */"1094"/* + */, //
"1371" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1371",
/* + */"1180"/* + */, //
"1408" })) //
.compileBothTripSort());
map2.put(580l, new RouteTripSpec(580l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, FT_SASKATCHEWAN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW_WEST_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"7908", // West Clareview Transit Centre
"77162", // Southfort Drive & South Point Shopping Fort Sask
"7405", // Dow Centennial Centre Fort Sask
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"7405", // Dow Centennial Centre Fort Sask
"7926", // 95 Street & 96 Avenue Fort Sask
"7908" // West Clareview Transit Centre
})) //
.compileBothTripSort());
map2.put(589l, new RouteTripSpec(589l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EDM_WASTE_MGT_CTR, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, COLISEUM) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1211", "7700",
/* + */"7701"/* + */, //
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] {
/* + */"7700"/* + */, //
"7701", "1211" })) //
.compileBothTripSort());
map2.put(591l, new RouteTripSpec(591l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHLANDS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2307", "2359", "1371" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1371", "2594", "2307" })) //
.compileBothTripSort());
map2.put(594l, new RouteTripSpec(594l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Valley Zoo", //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"5219", // 175 Street & 87 Avenue
"5332", // 152 Street & 87 Avenue
"5095", // 133 Street & Buena Vista Road
"5015" // Valley Zoo Parking Lot
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"5015", // Valley Zoo Parking Lot
"5095", // 133 Street & Buena Vista Road
"5610", // 155 Street & 87 Avenue
"5219" // 175 Street & 87 Avenue
})) //
.compileBothTripSort());
map2.put(595l, new RouteTripSpec(595l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, FT_EDM) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"4476", // Fort Edmonton
"2978", // ++
"2706" // South Campus Transit Centre Fort Edmonton Park
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"2706",// South Campus Transit Centre Fort Edmonton Park
"22160", // ++
"4476" // Fort Edmonton
})) //
.compileBothTripSort());
map2.put(596l, new RouteTripSpec(596l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, VLY_ZOO_FT_EDM) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5015", "4476", "2706" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2706", "4476", "5015" })) //
.compileBothTripSort());
map2.put(599l, new RouteTripSpec(599l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EDM_GARRISON, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"6316", // Eaux Claires Transit Centre
"7991", // 97 Street & 176 Avenue
"7873", // C Ortona Road & Churchill Avenue Garrison
"7681", // Ortona Road & Ubique Avenue Garrison
"7412", // Korea Road & Ortona Road Garrison
"7895" // B Hindenburg Line Road & Churchill Avenue Garrison
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"7895", // B Hindenburg Line Road & Churchill Avenue Garrison
"7406", // Highway 28A & Mons Avenue Garrison
"7873", // C Ortona Road & Churchill Avenue Garrison
"7681", // Ortona Road & Ubique Avenue Garrison
"6854", // 97 Street & 176 Avenue
"6316" // Eaux Claires Transit Centre
})) //
.compileBothTripSort());
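// Routes 601 and up below appear to be one-directional school / special services:
// stops are listed for only one direction, and the unused direction is declared with an empty stop list.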
map2.put(601l, new RouteTripSpec(601l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ROSS_SHEPPARD, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEDGEWOOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5627", "5908", "5983", "5548", "5392" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(602l, new RouteTripSpec(602l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ROSS_SHEPPARD, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ORMSBY_PL) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5755", "5828", "5725", "5874", "5548", "5392" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(606l, new RouteTripSpec(606l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CARLTON, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "6603", "6853", "6293", "6369", "5211", "5548" })) //
.compileBothTripSort());
map2.put(607l, new RouteTripSpec(607l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"6822", "6293", "6369", "5211", "5548" //
})) //
.compileBothTripSort());
map2.put(608l, new RouteTripSpec(608l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BEAUMARIS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"6593", "6027", //
"6369", // ==
"6372", "1664", // !=
"5173", // !=
"5090", // ==
"5211", "5548" //
})) //
.compileBothTripSort());
map2.put(609l, new RouteTripSpec(609l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BATURYN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"6593", "6053", //
"6369", // ==
"6372", "1664", // !=
"6289", "5173", // !=
"5356", // ==
"5211", "5548" //
})) //
.compileBothTripSort());
map2.put(610l, new RouteTripSpec(610l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DUNLUCE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "6131", "6177", "5211", "5548" })) //
.compileBothTripSort());
map2.put(612l, new RouteTripSpec(612l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, OXFORD, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "6410", "6695", "5211", "5548" })) //
.compileBothTripSort());
map2.put(613l, new RouteTripSpec(613l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RUTHERFORD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"9356", // 125 Street & 20 Avenue SW
"4213" // Century Park Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"4213", // Century Park Transit Centre
"9356" // 125 Street & 20 Avenue SW
})) //
.compileBothTripSort());
map2.put(617l, new RouteTripSpec(617l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KLARVATTEN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CARDINAL_LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7827", "7795", "7659" })) //
.compileBothTripSort());
map2.put(618l, new RouteTripSpec(618l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MATT_BERRY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JJ_BOWLEN) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7992", "7058", "7449", "7545" })) //
.compileBothTripSort());
map2.put(620l, new RouteTripSpec(620l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELVEDERE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, AOB) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7210", "1207", "2915" })) //
.compileBothTripSort());
map2.put(621l, new RouteTripSpec(621l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, AOB) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1002", "2553" })) //
.compileBothTripSort());
map2.put(635l, new RouteTripSpec(635l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5210", "1481", "1242", "1083", "1393" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(689l, new RouteTripSpec(689l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WINDSOR_PARK, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2851", "2974" })) //
.compileBothTripSort());
map2.put(697l, new RouteTripSpec(697l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"4810", // Leger Transit Centre
"4455", // Falconer Road & Riverbend Square
"4158", // Whitemud Drive SB & 53 Avenue
"2703", // South Campus Transit Centre Fort Edmonton Park
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
/* no stops *///
})) //
.compileBothTripSort());
map2.put(698l, new RouteTripSpec(698l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_PHERSON, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JACKSON_HTS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "3230", "3964" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(699l, new RouteTripSpec(699l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_PHERSON, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JACKSON_HTS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "3355", "3400", "3603" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(701l, new RouteTripSpec(701l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELMEAD) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5914", "5001" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(702l, new RouteTripSpec(702l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, PARKVIEW, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ORMSBY_PL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5881", "5828", "5725", "5198" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(703l, new RouteTripSpec(703l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CRESTWOOD, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ST_FRANCIS_XAVIER) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5421", "5038", "5174", "5941" })) //
.compileBothTripSort());
map2.put(705l, new RouteTripSpec(705l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTLAWN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"8602", // Lewis Farms Transit Centre
"5001", // West Edmonton Mall Transit Centre
"5029", // == 163 Street & 88 Avenue
"5577", // ?? 163 Street & 92 Avenue
"5991",// ?? 163 Street & 92 Avenue
"5522", // == 163 Street & 92 Avenue
"5069" // 165 Street & 95 Avenue
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(706l, new RouteTripSpec(706l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Jasper Pl TC", // _157_ST_100A_AVE
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE) // High School
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5106", "5177" })) //
.compileBothTripSort());
map2.put(707l, new RouteTripSpec(707l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, OSCAR_ROMERO, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, THE_HAMPTONS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "8670", "8135", "5986" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(708l, new RouteTripSpec(708l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, // not TC
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5012", "5874", "5221", "5109" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(709l, new RouteTripSpec(709l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MEADOWLARK) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5359", "5437", "1256" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(710l, new RouteTripSpec(710l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ROSS_SHEPPARD, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5001", "5174", "5588", "5392" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(711l, new RouteTripSpec(711l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ROSS_SHEPPARD, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "8603", "5013", "5929", "5433", "5180", "5896" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(712l, new RouteTripSpec(712l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, HILLCREST, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ORMSBY_PL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5755", "5828", "5894" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(717l, new RouteTripSpec(717l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, VICTORIA, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5001", "1426" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(723l, new RouteTripSpec(723l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HADDOW) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4941", "4319", "4815", "4069", "2974" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(725l, new RouteTripSpec(725l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KINGSWAY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "" })) // NO STOPS
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1111", "1857", "1939", "2002" })) //
.compileBothTripSort());
map2.put(726l, new RouteTripSpec(726l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ROSS_SHEPPARD, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4808", "4249", "5511", "5180", "5896" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(728l, new RouteTripSpec(728l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BROOKSIDE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "4034", "4029", "2710", "2974" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(729l, new RouteTripSpec(729l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "4815", "4246", "2974" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(730l, new RouteTripSpec(730l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LAGO_LINDO, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7377", "6317", "7016", "5548" })) //
.compileBothTripSort());
map2.put(731l, new RouteTripSpec(731l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, KINGSWAY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"5204", // Westmount Transit Centre
"1105" // Kingsway RAH Transit Centre
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"1105", // Kingsway RAH Transit Centre
"5204" // Westmount Transit Centre
})) //
.compileBothTripSort());
map2.put(733l, new RouteTripSpec(733l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5001", "2714", "2002" })) //
.compileBothTripSort());
map2.put(734l, new RouteTripSpec(734l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LAGO_LINDO, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MARY_BUTTERWORTH) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7377", "7483", "6236" })) //
.compileBothTripSort());
map2.put(735l, new RouteTripSpec(735l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5006", "5156", "2714", "2002" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(738l, new RouteTripSpec(738l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTH_CAMPUS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4815", "4158", "2709" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(741l, new RouteTripSpec(741l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WP_WAGNER, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KNOTTWOOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3023", "3001", "2111", "2189" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(744l, new RouteTripSpec(744l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MAYLIEWAN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, QUEEN_ELIZABETH) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7547", "7441", "7925", "7060" })) //
.compileBothTripSort());
map2.put(739l, new RouteTripSpec(739l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LENDRUM) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"2745", // 109 Street & 65 Avenue
"2002", // University Transit Centre
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
/* no stops *///
})) //
.compileBothTripSort());
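// Route 747 (below) also calls addBothFromTo() after compileBothTripSort(); presumably this pins
// trips that start and end at the same stop ("4216" / "9747") to a fixed direction for the airport shuttle loop.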
map2.put(747l, new RouteTripSpec(747l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EDM_INT_AIRPORT) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "9747", "4216" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4216", "9747" })) //
.compileBothTripSort() //
.addBothFromTo(MDirectionType.SOUTH.intValue(), "4216", "4216") //
.addBothFromTo(MDirectionType.NORTH.intValue(), "9747", "9747")); //
map2.put(748l, new RouteTripSpec(748l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LAGO_LINDO, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CARDINAL_LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "7377", "6309", "7353" })) //
.compileBothTripSort());
map2.put(750l, new RouteTripSpec(750l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EASTGLEN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7015", "7165", "1203", "1033" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(753l, new RouteTripSpec(753l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LONDONDERRY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7572", "7007" })) //
.compileBothTripSort());
map2.put(755l, new RouteTripSpec(755l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, OXFORD, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_O_LEARY) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "6452", "6695", "6628", "6442", "7358", "7165" })) //
.compileBothTripSort());
map2.put(756l, new RouteTripSpec(756l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LA_ZERTE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "6001", "6340", "6310", "7186" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(757l, new RouteTripSpec(757l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, _127_ST_129_AVE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
/* no stops *///
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"6369", // 127 Street & 129 Avenue
"1965", // 127 Street & 122 Avenue
"5201", // Westmount Transit Centre
"2515", // ++
"2002", // University Transit Centre
})) //
.compileBothTripSort());
map2.put(760l, new RouteTripSpec(760l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_NALLY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LARKSPUR) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3247", "3586", "2676" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(761l, new RouteTripSpec(761l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2285", "2974" })) //
.compileBothTripSort());
map2.put(762l, new RouteTripSpec(762l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_NALLY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, AVONMORE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2339", "2447", "2544", "2267", "2676" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(763l, new RouteTripSpec(763l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BONNIE_DOON, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, UNIVERSITY) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2159", "2891", "2001" })) //
.compileBothTripSort());
map2.put(764l, new RouteTripSpec(764l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2301", "2267", "1620" })) //
.compileBothTripSort());
map2.put(765l, new RouteTripSpec(765l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, RHATIGAN_RIDGE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "4461", "4249", "2974" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(767l, new RouteTripSpec(767l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3217", "3011", "2111", "2974" })) //
.compileBothTripSort());
map2.put(768l, new RouteTripSpec(768l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WP_WAGNER) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3280", "3556", "3212", "3007", "2111", "2189" })) //
.compileBothTripSort());
map2.put(769l, new RouteTripSpec(769l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WP_WAGNER, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3211", "3585", "2111", "2189" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(771l, new RouteTripSpec(771l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WP_WAGNER, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CRAWFORD_PLAINS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3143", "3217", "3002", "2111", "2189" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(773l, new RouteTripSpec(773l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WP_WAGNER, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3217", "3585", "2111", "2189" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(774l, new RouteTripSpec(774l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HARDISTY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SILVERBERRY) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3255", "3708", "3740", "3491", "2915", "2177" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(776l, new RouteTripSpec(776l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_NALLY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3217", "3796", "3586", "2676" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(777l, new RouteTripSpec(777l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MEADOWS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3703", "3560", "3217" })) //
.compileBothTripSort());
map2.put(778l, new RouteTripSpec(778l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_NALLY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3217", "3255", "3491", "2676" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(779l, new RouteTripSpec(779l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HARDISTY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3217", "3255", "3491", "2915", "2177" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(780l, new RouteTripSpec(780l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HARDISTY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3217", "3796", "3586", "2915", "2177" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(781l, new RouteTripSpec(781l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_NALLY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2105", "2551", "2676" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(782l, new RouteTripSpec(782l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KENILWORTH, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3211", "3585", "2111", "2255", "2487", "2160" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(783l, new RouteTripSpec(783l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KENILWORTH, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, GREENVIEW) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3328", "3537", "2160" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(784l, new RouteTripSpec(784l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_NALLY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MEADOWS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3255", "3708", "3740", "3491", "2676" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(785l, new RouteTripSpec(785l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HARDISTY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WILDROSE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3247", "3491", "2915", "2177" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(786l, new RouteTripSpec(786l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, AVALON, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2202", "2518" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(787l, new RouteTripSpec(787l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "2212", "2778", "2974" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(789l, new RouteTripSpec(789l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WP_WAGNER, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHWOOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3143", "3217", "2189" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(790l, new RouteTripSpec(790l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BEARSPAW) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4205", "4290", "4203", "4157", "4431", "2218" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(791l, new RouteTripSpec(791l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RUTHERFORD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "9242", "9685", "4216", "2218" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(793l, new RouteTripSpec(793l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, HARRY_AINLAY) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3217", "3008", "4490" })) //
.compileBothTripSort());
map2.put(795l, new RouteTripSpec(795l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, TWIN_BROOKS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4265", "4216", "2218" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(796l, new RouteTripSpec(796l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EASTGLEN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, FRASER) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7470", "7620", "1185" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(799l, new RouteTripSpec(799l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RIVERBEND, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "4808", "4489", "4069", "4246", "4029" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(800l, new RouteTripSpec(800l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MATT_BERRY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, QUEEN_ELIZABETH) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7383", "7288", "7298", "7140" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(802l, new RouteTripSpec(802l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE) // not TC
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5991", "5061", "5101", "5202" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5209", "5150", "5101" })) //
.compileBothTripSort());
map2.put(803l, new RouteTripSpec(803l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ORMSBY_PL, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BRUCE_SMITH) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5623", "5755", "5725" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(804l, new RouteTripSpec(804l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LA_PERLE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5548", "5012", "5024" })) //
.compileBothTripSort());
map2.put(805l, new RouteTripSpec(805l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL_TC) // WEDGEWOOD
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5061", "5069", "5002" })) //
.compileBothTripSort());
map2.put(806l, new RouteTripSpec(806l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"5181", "5107", "5207", //
"5549", // ==
"1759", // !=
"1867", "1735", // !=
"6122", // ==
"6333", "7011" //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(807l, new RouteTripSpec(807l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BERIAULT, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5894", "5001" })) //
.compileBothTripSort());
map2.put(808l, new RouteTripSpec(808l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, // not TC
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Jasper Place (not TC)") //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5577", "5111" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(809l, new RouteTripSpec(809l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, HILLCREST, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5713", "5005" })) //
.compileBothTripSort());
map2.put(810l, new RouteTripSpec(810l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ST_ROSE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5611", "5001" })) //
.compileBothTripSort());
map2.put(811l, new RouteTripSpec(811l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, PARKVIEW, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5198", "5294", "5069", "5903", "5013" })) //
.compileBothTripSort());
map2.put(812l, new RouteTripSpec(812l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, PARKVIEW, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LA_PERLE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5656", "5011", "5024" })) //
.compileBothTripSort());
map2.put(814l, new RouteTripSpec(814l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ROSS_SHEPPARD, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5392", "5527", "5140", "5007" })) //
.compileBothTripSort());
map2.put(815l, new RouteTripSpec(815l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5548", "5005" })) //
.compileBothTripSort());
map2.put(817l, new RouteTripSpec(817l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BERIAULT, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5894", "5012" })) //
.compileBothTripSort());
map2.put(818l, new RouteTripSpec(818l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL_TC, // BERIAULT
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ORMSBY_PL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5718", "5725", "5004" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5894", "5004", "5755", "5828", "5718" })) //
.compileBothTripSort());
map2.put(819l, new RouteTripSpec(819l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5203", "5102", "5007" })) //
.compileBothTripSort());
map2.put(820l, new RouteTripSpec(820l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LY_CAIRNS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2812", "2218" })) //
.compileBothTripSort());
map2.put(821l, new RouteTripSpec(821l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CRESTWOOD, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] {/* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5096", "5225", "5005" })) //
.compileBothTripSort());
map2.put(822l, new RouteTripSpec(822l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "1591", "1108", "1476", "7001" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "1532", "1104", "1426",
/* + */"1050"/* + */, //
"1142" })) //
.compileBothTripSort());
map2.put(824l, new RouteTripSpec(824l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, VICTORIA, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEWIS_FARMS_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1911", "5001", "8605" })) //
.compileBothTripSort());
map2.put(825l, new RouteTripSpec(825l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, VICTORIA) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "1554", "1237", "7002" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(826l, new RouteTripSpec(826l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MAGRATH) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2974", "4815", "4306", "4506" })) //
.compileBothTripSort());
map2.put(828l, new RouteTripSpec(828l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BROOKSIDE) // Ramsey Heights
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2974", "2707", "4021", "4034" })) //
.compileBothTripSort());
map2.put(829l, new RouteTripSpec(829l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2974", "4815" })) //
.compileBothTripSort());
map2.put(830l, new RouteTripSpec(830l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_NALLY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2547", "1142" })) //
.compileBothTripSort());
map2.put(832l, new RouteTripSpec(832l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5180", "6725", "6011" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(835l, new RouteTripSpec(835l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, AMISKWACIY) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"1979", //
"1669", // !=
"1974", "1735", // ==
"1799", "1759", // ==
"6122", // !=
"6333", "6579", "7003" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(836l, new RouteTripSpec(836l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KINGSWAY) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"1109", //
"1896", // ==
"-11329", // !=
"1821", "1669", "1974", // !=
"6122", // ==
"6328", "6252", "7003" //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(837l, new RouteTripSpec(837l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, COLISEUM, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5204", "1814", "1814", "1110", "1205" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(839l, new RouteTripSpec(839l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5548", "5132", "5038", "5013" })) //
.compileBothTripSort());
map2.put(840l, new RouteTripSpec(840l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"5206", //
"1725", // ==
"1759", // !=
"1867", "1735", // !=
"6122", // ==
"6333", "6002", "6047", "6001" //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(841l, new RouteTripSpec(841l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "6285", "6317", "7003" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7011", "6314", "6009" })) //
.compileBothTripSort());
map2.put(842l, new RouteTripSpec(842l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_O_LEARY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, OXFORD) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7120", "7496", "7060", "6348", "6243", "6337" })) //
.compileBothTripSort());
map2.put(843l, new RouteTripSpec(843l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5442", "5445", "1881", "1322" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(844l, new RouteTripSpec(844l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, FRASER, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW_EAST_TC) // QUEEN_ELIZABETH
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"7358", // 95 Street & 132 Avenue #QueenElizabeth
"7286", // 82 Street & 132 Avenue
"7330", // ==
"7206", // Belvedere Transit Centre
"7210", // Belvedere Transit Centre
"7335", // ==
"7104", // East Clareview Transit Centre
"7470", // 26 Street & 151 Avenue
"7437" // 21 Street & 147 Avenue #Fraser
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"7437", // 21 Street & 147 Avenue #Fraser
"7470", // 26 Street & 151 Avenue
"7105" // East Clareview Transit Centre
})) //
.compileBothTripSort());
map2.put(845l, new RouteTripSpec(845l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7120", "7496", "7060", "7007", "7186", "7106" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7106", "7572", "7185", "7007" })) //
.compileBothTripSort());
map2.put(846l, new RouteTripSpec(846l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BATURYN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5180", "6091", "6028", "6294" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(848l, new RouteTripSpec(848l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LAGO_LINDO, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CARDINAL_LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "7659", "6315", "7377", "7483" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(849l, new RouteTripSpec(849l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW_EAST_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, QUEEN_ELIZABETH) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7358", "7209", "7823", "7943", "7269", "7101" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(851l, new RouteTripSpec(851l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KLARVATTEN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CARDINAL_LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "7728", "7827", "7434" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(852l, new RouteTripSpec(852l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_O_LEARY) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "7496", "6130", "6522", "6011", "6127" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(853l, new RouteTripSpec(853l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELVEDERE_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE_TC) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7585", "7204" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7496", "7008" })) //
.compileBothTripSort());
map2.put(855l, new RouteTripSpec(855l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_O_LEARY) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "7496", "6301", "6039", "6447" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(856l, new RouteTripSpec(856l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JH_PICARD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2779", "2824", "1729" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(858l, new RouteTripSpec(858l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STADIUM_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, AMISKWACIY) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1979", "1110", "1401" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(859l, new RouteTripSpec(859l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ARCH_MAC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"5055", //
"5548", "5207", //
"5549", // ==
"1759", // !=
"1867", "1735", // !=
"6122", // ==
"6333", "7011" //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(860l, new RouteTripSpec(860l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_NALLY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2676", "3230", "3217" })) //
.compileBothTripSort());
map2.put(861l, new RouteTripSpec(861l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_NALLY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2676", "3230", "3247", "3446" })) //
.compileBothTripSort());
map2.put(862l, new RouteTripSpec(862l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, OTTEWELL, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS) // BURNEWOOD
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
/* no stops *///
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"2487", // 73 Street & 94B Avenue
"2064", // !=
"2360", // ==
"2426", // ==
"2915", // != 61 Street & 95 Avenue
"2360", // ==
"2426", // ==
"2434", // !=
"3230", // 49 Street & 44 Avenue
"3704", // Meadows Transit Centre
"3185", // 34 Street & 35A Avenue
})) //
.compileBothTripSort());
map2.put(864l, new RouteTripSpec(864l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_NALLY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2676", "2196", "2393", "2188", "2385", "2103" })) //
.compileBothTripSort());
map2.put(865l, new RouteTripSpec(865l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, TD_BAKER, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3150", "3212" })) //
.compileBothTripSort());
map2.put(866l, new RouteTripSpec(866l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ST_KEVIN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "2439", "2307" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(867l, new RouteTripSpec(867l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LAKEWOOD) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "3002", "3217" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(869l, new RouteTripSpec(869l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, OTTEWELL_AOB, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
/* no stops *///
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"2487", // 73 Street & 94B Avenue
"2064", // !=
"2360", // ==
"2426", // ==
"2915", // != 61 Street & 95 Avenue
"2360", // ==
"2426", // ==
"2434", // !=
"3355", // 50 Street & Jamha Road
"3411", // 23 Street & 37A Avenue
"3217", // Mill Woods Transit Centre
})) //
.compileBothTripSort());
map2.put(870l, new RouteTripSpec(870l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHWOOD, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LAKEWOOD) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "3002", "3204", "3142" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(871l, new RouteTripSpec(871l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELLE_RIVE, // LAGO_LINDO
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MARY_BUTTERWORTH) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "6285", "7377", "7780", "7430" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(872l, new RouteTripSpec(872l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WP_WAGNER, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2418", "2103", "3003", "3214" })) //
.compileBothTripSort());
map2.put(873l, new RouteTripSpec(873l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KENILWORTH, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WOODVALE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2160", "3461" })) //
.compileBothTripSort());
map2.put(874l, new RouteTripSpec(874l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WP_WAGNER, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2189", "3204", "3142" })) //
.compileBothTripSort());
map2.put(875l, new RouteTripSpec(875l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WP_WAGNER, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2418", "3529", "3211" })) //
.compileBothTripSort());
map2.put(876l, new RouteTripSpec(876l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WP_WAGNER, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2189", "3203", "3356" })) //
.compileBothTripSort());
map2.put(877l, new RouteTripSpec(877l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JH_PICARD, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2080", "2640", "2245", "3004", "3201" })) //
.compileBothTripSort());
map2.put(878l, new RouteTripSpec(878l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HARDISTY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2177", "3355", "3217" })) //
.compileBothTripSort());
map2.put(879l, new RouteTripSpec(879l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, OTTEWELL, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE) // Mill Woods?
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2487", "2188", "2526", "2103" })) //
.compileBothTripSort());
map2.put(880l, new RouteTripSpec(880l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KENILWORTH, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2160", "2188", "2105", "3529", "3211" })) //
.compileBothTripSort());
map2.put(881l, new RouteTripSpec(881l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CAPILANO, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_NALLY) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "2151", "2301" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(882l, new RouteTripSpec(882l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, KENILWORTH, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2160", "2188", "2526", "2103", "3003", "3214" })) //
.compileBothTripSort());
map2.put(883l, new RouteTripSpec(883l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, VERNON_BARFORD, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, TWIN_BROOKS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { //
"4340", // 119 Street & Fairway Drive
"4238", // 119 Street & Fairway Drive
"4265", // Twin Brooks Drive & 12 Avenue
"4248", // Running Creek Road & 12 Avenue
})) //
.compileBothTripSort());
map2.put(884l, new RouteTripSpec(884l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WP_WAGNER, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LAKEWOOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2418", "3008", "3023", "3008" })) //
.compileBothTripSort());
map2.put(885l, new RouteTripSpec(885l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, VERNON_BARFORD, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4270", "4238", "4214" })) //
.compileBothTripSort());
map2.put(886l, new RouteTripSpec(886l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, AVALON, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2009", "2207" })) //
.compileBothTripSort());
map2.put(887l, new RouteTripSpec(887l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HARRY_AINLAY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, TWIN_BROOKS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4143", "4204", "4265", "4248" })) //
.compileBothTripSort());
map2.put(888l, new RouteTripSpec(888l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, VERNON_BARFORD) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "4270", "4238", "4205" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(889l, new RouteTripSpec(889l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2206", "4490", "4143", "4198", "4205", //
"4290", "4203" })) //
.compileBothTripSort());
map2.put(890l, new RouteTripSpec(890l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2974", "2201" })) //
.compileBothTripSort());
map2.put(892l, new RouteTripSpec(892l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HARRY_AINLAY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4490", "4486", "4208" })) //
.compileBothTripSort());
map2.put(893l, new RouteTripSpec(893l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, HARRY_AINLAY) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "4490", "3004", "3217" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(894l, new RouteTripSpec(894l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA) // Allendale
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "2741", "2974", "2102" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(895l, new RouteTripSpec(895l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2821", "2664", "2212" })) //
.compileBothTripSort());
map2.put(896l, new RouteTripSpec(896l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RIVERBEND, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4021", "4803" })) //
.compileBothTripSort());
map2.put(897l, new RouteTripSpec(897l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LAKEWOOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3002", "3214", "3740", "2110" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(898l, new RouteTripSpec(898l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_TC) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2974", "2102", "3001", "3217" })) //
.compileBothTripSort());
map2.put(899l, new RouteTripSpec(899l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5991", "5061", "5069", "5903", "5012" })) //
.compileBothTripSort());
map2.put(901l, new RouteTripSpec(901l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LONDONDERRY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, _142_ST_109_AVE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "5055", "5548", "7011", "6304", "7456" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(902l, new RouteTripSpec(902l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, PARKVIEW, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ORMSBY_PL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5656", "5611", "5755", "5828", "5725" })) //
.compileBothTripSort());
map2.put(903l, new RouteTripSpec(903l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, QUEEN_ELIZABETH) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7383", "7260", "7909" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(907l, new RouteTripSpec(907l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HADDOW) // Rhatigan Rdg
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2974", "4021", "4016" })) //
.compileBothTripSort());
map2.put(908l, new RouteTripSpec(908l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, FRASER, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EASTGLEN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1033", "7237" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(909l, new RouteTripSpec(909l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EASTGLEN) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "1185", "7120", "7009", "6315" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(913l, new RouteTripSpec(913l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5211", "7011", "6313" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(914l, new RouteTripSpec(914l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, OXFORD, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5207", "6328", "6337" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(916l, new RouteTripSpec(916l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BATURYN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"5206", //
"1725", // ==
"1759", // !=
"1867", "1735", // !=
"6122", // ==
"6002"//
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(917l, new RouteTripSpec(917l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, FR_TROY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JACKSON_HTS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3411", "3267" })) //
.compileBothTripSort());
map2.put(918l, new RouteTripSpec(918l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, FR_TROY, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JACKSON_HTS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3964", "3420" })) //
.compileBothTripSort());
map2.put(919l, new RouteTripSpec(919l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, EASTGLEN, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "1033", "1521", "1001" })) //
.compileBothTripSort());
map2.put(920l, new RouteTripSpec(920l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MINCHAU, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HOLY_FAMILY) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "3153", "3363" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(921l, new RouteTripSpec(921l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_NALLY, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SILVERBERRY) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2676", "3230", "3419" })) //
.compileBothTripSort());
map2.put(922l, new RouteTripSpec(922l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WESTMOUNT, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5548", "4579", "4806" })) //
.compileBothTripSort());
map2.put(923l, new RouteTripSpec(923l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, SOUTHGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, TWIN_BROOKS) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2204", "4490", "4204", "4265", "4248" })) //
.compileBothTripSort());
map2.put(924l, new RouteTripSpec(924l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DAN_KNOTT) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "3572", "3006", "3208" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(925l, new RouteTripSpec(925l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, STRATHCONA, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WINDSOR_PARK) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2974", "2844" })) //
.compileBothTripSort());
map2.put(926l, new RouteTripSpec(926l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, OTTEWELL, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { //
"2050", // 61 Street & 94B Avenue
"2287", // Ottewell Road & 94 Avenue
"2752", // == 112 Street & 82 Avenue
"2982", // != 114 Street & 83 Avenue
"22354", // != 114 Street & 83 Avenue
"2638", // == 114 Street & 85 Avenue
"2001", // University Transit Centre
"2702", // South Campus Transit Centre Fort Edmonton Park
"5296", // ++
"5006", // West Edmonton Mall Transit Centre
})) //
.compileBothTripSort());
map2.put(931l, new RouteTripSpec(931l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LA_ZERTE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LAGO_LINDO) // KLARVATTEN
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7384", "7483" })) //
.compileBothTripSort());
map2.put(932l, new RouteTripSpec(932l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CLAREVIEW_WEST_TC, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LA_ZERTE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "7384", "7241", "7604", "7901" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(934l, new RouteTripSpec(934l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LA_ZERTE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CASTLE_DOWNS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "7572", "6311", "6008" })) //
.compileBothTripSort());
map2.put(935l, new RouteTripSpec(935l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, HOLLICK_KENYON, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_LEOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "7160", "7535", "7298", "7140" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(939l, new RouteTripSpec(939l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ELSINORE, // CHAMBERY
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MARY_BUTTERWORTH) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "6285", "6166", "6674" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(940l, new RouteTripSpec(940l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, AMISKWACIY) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "1979", "1476", "1201", "1001" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(941l, new RouteTripSpec(941l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ABBOTTSFIELD, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, AOB) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"2915", //
"1086", // ==
"1001", // !=
"1003" // !=
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(943l, new RouteTripSpec(943l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, BELVEDERE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, AOB) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "2915", "1206", "7210" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(945l, new RouteTripSpec(945l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, EAUX_CLAIRES, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, _88_ST_132_AVE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "7496", "6315" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(948l, new RouteTripSpec(948l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, HILLCREST, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LA_PERLE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5713", "5011", "5024" })) //
.compileBothTripSort());
map2.put(949l, new RouteTripSpec(949l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, HILLCREST, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LA_PERLE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5713", "5012", "5024" })) //
.compileBothTripSort());
map2.put(950l, new RouteTripSpec(950l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BERIAULT, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LYMBURN_ORMSBY_PL) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] {/* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5894", "5419", "5725" })) //
.compileBothTripSort());
map2.put(952l, new RouteTripSpec(952l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, CRESTWOOD, // RIO_TERRACE
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, ST_FRANCIS_XAVIER) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { //
"5071", "5174", "5433", //
"5588", // ==
"5198", // !=
"5043", // !=
"5120" // !=
})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(953l, new RouteTripSpec(953l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LYMBURN) // ORMSBY_PL
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5355", "5004", "5755", "5828", "5725" })) //
.compileBothTripSort());
map2.put(954l, new RouteTripSpec(954l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEDGEWOOD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5687", "5002", "5979", "5968" })) //
.compileBothTripSort());
map2.put(955l, new RouteTripSpec(955l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, LA_PERLE) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5355", "5011", "5024" })) //
.compileBothTripSort());
map2.put(956l, new RouteTripSpec(956l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, JASPER_PLACE, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, THE_GRANGE) // THE_HAMPTONS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5687", "8135", "8097", "8102" })) //
.compileBothTripSort());
map2.put(957l, new RouteTripSpec(957l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, OSCAR_ROMERO, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, THE_HAMPTONS) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "5980", "5695", "8583", "8033", "8670" })) //
.compileBothTripSort());
map2.put(959l, new RouteTripSpec(959l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WEST_EDM_MALL_TC, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, OSCAR_ROMERO) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "5695", "5002" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
map2.put(965l, new RouteTripSpec(965l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BRAEMAR, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "2462", "1989" })) //
.compileBothTripSort());
map2.put(966l, new RouteTripSpec(966l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, OTTEWELL_AOB, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2487", //
"2064", //
"2915", //
"2360", //
"2426", //
"2434", //
"3355", "3157", "3217" })) //
.compileBothTripSort());
map2.put(967l, new RouteTripSpec(967l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WHITEMUD_DR_53_AVE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4021", "4353", "4809" })) //
.compileBothTripSort());
map2.put(968l, new RouteTripSpec(968l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, ST_ROSE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "5611", "4579", "4806" })) //
.compileBothTripSort());
map2.put(969l, new RouteTripSpec(969l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WHITEMUD_DR_53_AVE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, LEGER) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4021", "4129", "4804" })) //
.compileBothTripSort());
map2.put(970l, new RouteTripSpec(970l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, WHITEMUD_DR_53_AVE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, JOSEPH_MC_NEIL) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { //
"4950", "4636", "4811", "4597", //
"4158", //
"4153" //
})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] {/* no stops */})) //
.compileBothTripSort());
map2.put(971l, new RouteTripSpec(971l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, _84_ST_105_AVE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, _34_ST_35A_AVE) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2676", "3355", "3708", "3185" })) //
.compileBothTripSort());
map2.put(972l, new RouteTripSpec(972l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MAC_EWAN, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RUTHERFORD) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { "9251", "9848", "9685" })) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4213", "9666", "9242", "9251" })) //
.compileBothTripSort());
map2.put(973l, new RouteTripSpec(973l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_PHERSON, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BURNEWOOD) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3603", "3267" })) //
.compileBothTripSort());
map2.put(974l, new RouteTripSpec(974l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MC_PHERSON, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, BURNEWOOD) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { "3603", "3420" })) //
.compileBothTripSort());
map2.put(975l, new RouteTripSpec(975l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILLGATE, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "2106", "3355", "3748", "3185", "3206" })) //
.compileBothTripSort());
map2.put(976l, new RouteTripSpec(976l, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, RIVERBEND, //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, CENTURY_PK) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(new String[] { "4021", "4803", "4202" })) //
.compileBothTripSort());
map2.put(977l, new RouteTripSpec(977l, //
MDirectionType.EAST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MEADOWS, //
MDirectionType.WEST.intValue(), MTrip.HEADSIGN_TYPE_STRING, MILL_WOODS_) //
.addTripSort(MDirectionType.EAST.intValue(), //
Arrays.asList(new String[] { "3217", "3470", "3703" })) //
.addTripSort(MDirectionType.WEST.intValue(), //
Arrays.asList(new String[] { /* no stops */})) //
.compileBothTripSort());
ALL_ROUTE_TRIPS2 = map2;
}
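	// Routes with an entry in ALL_ROUTE_TRIPS2 are split using the manually curated stop sequences above; all other routes fall back to the default GTFS-based splitting.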
@Override
public Pair<Long[], Integer[]> splitTripStop(MRoute mRoute, GTrip gTrip, GTripStop gTripStop, ArrayList<MTrip> splitTrips, GSpec routeGTFS) {
if (ALL_ROUTE_TRIPS2.containsKey(mRoute.getId())) {
return SplitUtils.splitTripStop(mRoute, gTrip, gTripStop, routeGTFS, ALL_ROUTE_TRIPS2.get(mRoute.getId()));
}
return super.splitTripStop(mRoute, gTrip, gTripStop, splitTrips, routeGTFS);
}
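	// Headsign cleanup patterns: strip the leading route number, "Transit Centre" and "Super Express", abbreviate "Town Centre" and "Edmonton", and collapse "n a i t" into NAIT.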
private static final Pattern N_A_I_T = Pattern.compile("((^|\\W){1}(n a i t)(\\W|$){1})", Pattern.CASE_INSENSITIVE);
private static final String N_A_I_T_REPLACEMENT = "$2" + NAIT + "$4";
private static final Pattern SUPER_EXPRESS = Pattern.compile("((^|\\W){1}(super express)(\\W|$){1})", Pattern.CASE_INSENSITIVE);
private static final Pattern STARTS_WITH_RSN = Pattern.compile("(^[\\d]+\\s)", Pattern.CASE_INSENSITIVE);
private static final String VIA = " via ";
@Override
public String cleanTripHeadsign(String tripHeadsign) {
int indexOfVIA = tripHeadsign.toLowerCase(Locale.ENGLISH).indexOf(VIA);
if (indexOfVIA >= 0) {
			tripHeadsign = tripHeadsign.substring(indexOfVIA); // keep only the "via ..." portion of the trip headsign
}
tripHeadsign = STARTS_WITH_RSN.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
tripHeadsign = TRANSIT_CENTER.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
tripHeadsign = TOWN_CENTER.matcher(tripHeadsign).replaceAll(TOWN_CENTER_REPLACEMENT);
tripHeadsign = SUPER_EXPRESS.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
tripHeadsign = EDMONTON.matcher(tripHeadsign).replaceAll(EDMONTON_REPLACEMENT);
tripHeadsign = N_A_I_T.matcher(tripHeadsign).replaceAll(N_A_I_T_REPLACEMENT);
tripHeadsign = CleanUtils.cleanStreetTypes(tripHeadsign);
tripHeadsign = CleanUtils.cleanNumbers(tripHeadsign);
tripHeadsign = CleanUtils.removePoints(tripHeadsign);
return CleanUtils.cleanLabel(tripHeadsign);
}
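	// Abbreviation patterns for stop names (TRANSIT_CENTER, TOWN_CENTER and EDMONTON are also reused by cleanTripHeadsign() above).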
private static final Pattern TRANSIT_CENTER = Pattern.compile("((^|\\W){1}(transit center|transit centre)(\\W|$){1})", Pattern.CASE_INSENSITIVE);
private static final String TRANSIT_CENTER_REPLACEMENT = "$2" + TRANSIT_CENTER_SHORT + "$4";
private static final Pattern TOWN_CENTER = Pattern.compile("((^|\\W){1}(town center|town centre)(\\W|$){1})", Pattern.CASE_INSENSITIVE);
private static final String TOWN_CENTER_REPLACEMENT = "$2TC$4";
private static final Pattern INTERNATIONAL = Pattern.compile("((^|\\W){1}(international)(\\W|$){1})", Pattern.CASE_INSENSITIVE);
private static final String INTERNATIONAL_REPLACEMENT = "$2Int$4";
private static final Pattern EDMONTON = Pattern.compile("((^|\\W){1}(edmonton)(\\W|$){1})", Pattern.CASE_INSENSITIVE);
private static final String EDMONTON_REPLACEMENT = "$2" + EDM + "$4";
@Override
public String cleanStopName(String gStopName) {
gStopName = TRANSIT_CENTER.matcher(gStopName).replaceAll(TRANSIT_CENTER_REPLACEMENT);
gStopName = TOWN_CENTER.matcher(gStopName).replaceAll(TOWN_CENTER_REPLACEMENT);
gStopName = INTERNATIONAL.matcher(gStopName).replaceAll(INTERNATIONAL_REPLACEMENT);
gStopName = EDMONTON.matcher(gStopName).replaceAll(EDMONTON_REPLACEMENT);
gStopName = CleanUtils.cleanStreetTypes(gStopName);
gStopName = CleanUtils.cleanNumbers(gStopName);
return CleanUtils.cleanLabel(gStopName);
}
@Override
public int getStopId(GStop gStop) {
return Math.abs(super.getStopId(gStop)); // remove negative stop IDs
}
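	// Some stop codes in the feed start with a "-"; strip it before returning the code.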
private static final Pattern REMOVE_STARTING_DASH = Pattern.compile("(^\\-)", Pattern.CASE_INSENSITIVE);
@Override
public String getStopCode(GStop gStop) {
String stopCode = super.getStopCode(gStop); // do not change, used by real-time API
stopCode = REMOVE_STARTING_DASH.matcher(stopCode).replaceAll(StringUtils.EMPTY);
return stopCode; // do not change, used by real-time API
}
}
| Compatibility with latest update.
| src/org/mtransit/parser/ca_edmonton_ets_bus/EdmontonETSBusAgencyTools.java | Compatibility with latest update. | <ide><path>rc/org/mtransit/parser/ca_edmonton_ets_bus/EdmontonETSBusAgencyTools.java
<ide> return excludeUselessCalendarDate(gCalendarDates, this.serviceIds);
<ide> }
<ide> return super.excludeCalendarDate(gCalendarDates);
<add> }
<add>
<add> @Override
<add> public boolean excludeRoute(GRoute gRoute) {
<add> return super.excludeRoute(gRoute);
<ide> }
<ide>
<ide> @Override
<ide> "7106", "7572", "7008", "7496", "7007" //
<ide> })) //
<ide> .compileBothTripSort());
<del> map2.put(12l, new RouteTripSpec(12l, //
<add> map2.put(12L, new RouteTripSpec(12L, //
<ide> MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, NORTHGATE, //
<ide> MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, DOWNTOWN) //
<ide> .addTripSort(MDirectionType.NORTH.intValue(), //
<ide> Arrays.asList(new String[] { //
<del> "1251", "1529", //
<del> "1476", "1434", "1435", //
<del> "1553", // ==
<del> "1032", // !=
<del> "1109", // ==
<del> "1886", // !=
<del> "11307", //
<del> "1821", //
<del> "1669", //
<del> "6122", //
<del> "6328", "6252", "7003" //
<del> })) //
<del> .addTripSort(MDirectionType.SOUTH.intValue(), //
<del> Arrays.asList(new String[] { //
<del> "7003", "6551", //
<del> "6369", //
<del> "6289", //
<del> "6372", //
<del> "-11330", //
<del> "1932", "1847", //
<del> "1778", //
<del> "1847", "1951", //
<del> "1109", "1533", "1476", //
<del> "11326", "1113", "1251" //
<add> "1110", // Kingsway RAH Transit Centre
<add> "7003", // Northgate Transit Centre
<add> })) //
<add> .addTripSort(MDirectionType.SOUTH.intValue(), //
<add> Arrays.asList(new String[] { //
<add> "7003", // Northgate Transit Centre
<add> "1110", // Kingsway RAH Transit Centre
<ide> })) //
<ide> .compileBothTripSort());
<ide> map2.put(13l, new RouteTripSpec(13l, // |
|
Java | mit | 56b374ee247eef3ca787f878ef3c287cfbdbfb80 | 0 | aterai/java-swing-tips,aterai/java-swing-tips,aoguren/java-swing-tips,aterai/java-swing-tips,mhcrnl/java-swing-tips,mhcrnl/java-swing-tips,aterai/java-swing-tips,mhcrnl/java-swing-tips,aoguren/java-swing-tips,aoguren/java-swing-tips | package example;
//-*- mode:java; encoding:utf8n; coding:utf-8 -*-
// vim:set fileencoding=utf-8:
//@homepage@
import java.awt.*;
import java.util.*;
import javax.swing.*;
import javax.swing.table.*;
public class TestModel extends DefaultTableModel {
private static final ColumnContext[] columnArray = {
new ColumnContext("No.", Integer.class, false),
new ColumnContext("Name", String.class, false),
new ColumnContext("Progress", Integer.class, false)
};
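    // maps the value of the "No." column to the SwingWorker that updates that row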
private final Map<Integer, SwingWorker> swmap = new HashMap<Integer, SwingWorker>();
private int number = 0;
public void addTest(Test t, SwingWorker worker) {
Object[] obj = {number, t.getName(), t.getProgress()};
super.addRow(obj);
swmap.put(number, worker);
number++;
}
public synchronized SwingWorker getSwingWorker(int identifier) {
Integer key = (Integer)getValueAt(identifier, 0);
return swmap.get(key);
}
public Test getTest(int identifier) {
return new Test((String)getValueAt(identifier,1), (Integer)getValueAt(identifier,2));
}
@Override public boolean isCellEditable(int row, int col) {
return columnArray[col].isEditable;
}
@Override public Class<?> getColumnClass(int modelIndex) {
return columnArray[modelIndex].columnClass;
}
@Override public int getColumnCount() {
return columnArray.length;
}
@Override public String getColumnName(int modelIndex) {
return columnArray[modelIndex].columnName;
}
private static class ColumnContext {
public final String columnName;
public final Class columnClass;
public final boolean isEditable;
public ColumnContext(String columnName, Class columnClass, boolean isEditable) {
this.columnName = columnName;
this.columnClass = columnClass;
this.isEditable = isEditable;
}
}
}
class Test {
private String name;
private Integer progress;
public Test(String name, Integer progress) {
this.name = name;
this.progress = progress;
}
public void setName(String str) {
name = str;
}
public void setProgress(Integer str) {
progress = str;
}
public String getName() {
return name;
}
public Integer getProgress() {
return progress;
}
}
class ProgressRenderer extends DefaultTableCellRenderer {
private final JProgressBar b = new JProgressBar(0, 100);
public ProgressRenderer() {
super();
setOpaque(true);
b.setBorder(BorderFactory.createEmptyBorder(1,1,1,1));
}
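    // negative values render as "Canceled", values from 0 to 99 as a progress bar, anything else as "Done"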
@Override public Component getTableCellRendererComponent(JTable table, Object value,
boolean isSelected, boolean hasFocus,
int row, int column) {
Integer i = (Integer)value;
String text = "Done";
if(i<0) {
text = "Canceled";
}else if(i<100) {
b.setValue(i);
return b;
}
super.getTableCellRendererComponent(table, text, isSelected, hasFocus, row, column);
return this;
}
}
| TableCellProgressBar/src/java/example/TestModel.java | package example;
//-*- mode:java; encoding:utf8n; coding:utf-8 -*-
// vim:set fileencoding=utf-8:
//@homepage@
import java.awt.*;
import java.util.*;
import javax.swing.*;
import javax.swing.table.*;
public class TestModel extends DefaultTableModel {
private static final ColumnContext[] columnArray = {
new ColumnContext("No.", Integer.class, false),
new ColumnContext("Name", String.class, false),
new ColumnContext("Progress", Integer.class, false)
};
private final Map<Integer, SwingWorker> swmap = new HashMap<Integer, SwingWorker>();
private int number = 0;
public void addTest(Test t, SwingWorker worker) {
Object[] obj = {number, t.getName(), t.getProgress()};
super.addRow(obj);
swmap.put(number, worker);
number++;
}
public synchronized SwingWorker getSwingWorker(int identifier) {
Integer key = (Integer)getValueAt(identifier, 0);
return swmap.get(key);
}
public Test getTest(int identifier) {
return new Test((String)getValueAt(identifier,1), (Integer)getValueAt(identifier,2));
}
@Override public boolean isCellEditable(int row, int col) {
return columnArray[col].isEditable;
}
@Override public Class<?> getColumnClass(int modelIndex) {
return columnArray[modelIndex].columnClass;
}
@Override public int getColumnCount() {
return columnArray.length;
}
@Override public String getColumnName(int modelIndex) {
return columnArray[modelIndex].columnName;
}
private static class ColumnContext {
public final String columnName;
public final Class columnClass;
public final boolean isEditable;
public ColumnContext(String columnName, Class columnClass, boolean isEditable) {
this.columnName = columnName;
this.columnClass = columnClass;
this.isEditable = isEditable;
}
}
}
class Test {
private String name;
private Integer progress;
public Test(String name, Integer progress) {
this.name = name;
this.progress = progress;
}
public void setName(String str) {
name = str;
}
public void setProgress(Integer str) {
progress = str;
}
public String getName() {
return name;
}
public Integer getProgress() {
return progress;
}
}
class ProgressRenderer extends DefaultTableCellRenderer {
private final JProgressBar b = new JProgressBar(0, 100);
public ProgressRenderer() {
super();
setOpaque(true);
b.setBorder(BorderFactory.createEmptyBorder(1,1,1,1));
}
@Override public Component getTableCellRendererComponent(JTable table, Object value,
boolean isSelected, boolean hasFocus,
int row, int column) {
Integer i = (Integer)value;
String text = "Done";
if(i<0) {
text = "Canceled";
}else if(i<100) {
b.setValue(i);
return b;
}
super.getTableCellRendererComponent(table, text, isSelected, hasFocus, row, column);
return this;
}
}
| indent | TableCellProgressBar/src/java/example/TestModel.java | indent | <ide><path>ableCellProgressBar/src/java/example/TestModel.java
<ide>
<ide> public class TestModel extends DefaultTableModel {
<ide> private static final ColumnContext[] columnArray = {
<del> new ColumnContext("No.", Integer.class, false),
<del> new ColumnContext("Name", String.class, false),
<add> new ColumnContext("No.", Integer.class, false),
<add> new ColumnContext("Name", String.class, false),
<ide> new ColumnContext("Progress", Integer.class, false)
<ide> };
<ide> private final Map<Integer, SwingWorker> swmap = new HashMap<Integer, SwingWorker>(); |
|
Java | apache-2.0 | daa5d1fd8e4ce57ebbab3daf6c502dede6d52f5c | 0 | TU-Berlin/mathosphere,TU-Berlin/mathosphere | package com.formulasearchengine.mathosphere.mathpd;
import com.formulasearchengine.mathosphere.mathpd.cli.FlinkPdCommandConfig;
import com.formulasearchengine.mathosphere.mathpd.contracts.ExtractedMathPDDocumentMapper;
import com.formulasearchengine.mathosphere.mathpd.contracts.TextExtractorMapper;
import com.formulasearchengine.mathosphere.mathpd.pojos.ExtractedMathPDDocument;
import com.formulasearchengine.mathosphere.mlp.contracts.CreateCandidatesMapper;
import com.formulasearchengine.mathosphere.mlp.contracts.JsonSerializerMapper;
import com.formulasearchengine.mathosphere.mlp.contracts.TextAnnotatorMapper;
import com.formulasearchengine.mathosphere.mlp.pojos.ParsedWikiDocument;
import com.formulasearchengine.mathosphere.mlp.pojos.WikiDocumentOutput;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.GroupReduceFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.operators.Order;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.io.TextInputFormat;
import org.apache.flink.api.java.io.TextOutputFormat;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.operators.FlatMapOperator;
import org.apache.flink.api.java.operators.GroupReduceOperator;
import org.apache.flink.api.java.tuple.*;
import org.apache.flink.core.fs.FileSystem.WriteMode;
import org.apache.flink.core.fs.Path;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.text.DecimalFormat;
import java.util.Base64;
import java.util.HashMap;
public class FlinkPd {
private static final Logger LOGGER = LoggerFactory.getLogger(FlinkPd.class);
private static final int NUMBER_OF_ALL_DOCS = 4; // only used in TF_IDF mode
private static final double EPSILON = 0.00000000000000000001;
private static final boolean IS_MODE_TFIDF = false; // if false, we use relative similarity
private static final boolean IS_MODE_PREPROCESSING = true;
private static DecimalFormat decimalFormat = new DecimalFormat("0.0");
public static void main(String[] args) throws Exception {
FlinkPdCommandConfig config = FlinkPdCommandConfig.from(args);
run(config);
}
//private static String generateIdPair(String id1, String id2) {
// return id1 + "-" + id2;
//}
//private static String getIdFromIdPair(String idPair, int index) {
// return idPair.split("-")[index];
//}
private static void collectElementFrequencies(HashMap<String, Double> histogramOfDimension, String dimension, Collector<Tuple3<String, String, Double>> collector) {
for (String key : histogramOfDimension.keySet()) {
//collector.collect(new Tuple3<>(dimension, key, histogramOfDimension.get(key))); // this would be the term frequency in the whole dataset,
collector.collect(new Tuple3<>(dimension, key, 1.0)); // but IDF is actually the number of documents that contain the term
}
}
private static void convertAbsoluteHistogramToTFIDFHistogram(ExtractedMathPDDocument doc, ExtractedMathPDDocument tfidfDoc, String dimensionName, String elementName, double df) {
HashMap<String, Double> histogramIn = null;
HashMap<String, Double> histogramOut = null;
switch (dimensionName) {
case "bvar":
histogramIn = doc.getHistogramBvar();
histogramOut = tfidfDoc.getHistogramBvar();
break;
case "ci":
histogramIn = doc.getHistogramCi();
histogramOut = tfidfDoc.getHistogramCi();
break;
case "cn":
histogramIn = doc.getHistogramCn();
histogramOut = tfidfDoc.getHistogramCn();
break;
case "csymbol":
histogramIn = doc.getHistogramCsymbol();
histogramOut = tfidfDoc.getHistogramCsymbol();
break;
default:
throw new RuntimeException("unknown dimension");
}
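        // tf-idf: accumulate the absolute term frequency weighted by the smoothed inverse document frequency log(N / df)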
histogramOut.put(elementName, histogramOut.getOrDefault(elementName, 0.0) +
histogramIn.getOrDefault(elementName, 0.0)
* (EPSILON + Math.log(NUMBER_OF_ALL_DOCS / (df + EPSILON))));
}
/**
* This function takes math pd snippets and converts them to single documents (by merging all snippets belonging to the same document)
*
* @param extractedMathPdSnippets
* @return
*/
private static DataSet<Tuple2<String, ExtractedMathPDDocument>> aggregateSnippetsToSingleDocs(FlatMapOperator<String, Tuple2<String, ExtractedMathPDDocument>> extractedMathPdSnippets) {
DataSet<Tuple2<String, ExtractedMathPDDocument>> extractedMathPdDocuments = extractedMathPdSnippets
.groupBy(0)
.reduce(new ReduceFunction<Tuple2<String, ExtractedMathPDDocument>>() {
@Override
public Tuple2<String, ExtractedMathPDDocument> reduce(Tuple2<String, ExtractedMathPDDocument> t0, Tuple2<String, ExtractedMathPDDocument> t1) throws Exception {
t1.f1.mergeOtherIntoThis(t0.f1);
t1.f1.setText("removed");
LOGGER.info("merged {} into {}", new Object[]{t1.f0, t0.f0});
return t1;
}
});
return extractedMathPdDocuments;
}
public static void run(FlinkPdCommandConfig config) throws Exception {
final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
final String preprocessedSourcesFiles = config.getDataset() + "_preprocessed";
String preprocessedRefsFiles = config.getRef() + "_preprocessed";
if (preprocessedRefsFiles.equals(preprocessedSourcesFiles)) {
preprocessedRefsFiles += "2";
}
if (IS_MODE_PREPROCESSING) {
DataSource<String> source = readWikiDump(config, env);
DataSource<String> refs = readRefs(config, env);
/* final FlatMapOperator<String, Tuple2<String, ExtractedMathPDDocument>> extractedMathPdSnippetsSources = source.flatMap(new TextExtractorMapper());
// first, merge all pages of one doc to one doc
DataSet<Tuple2<String, ExtractedMathPDDocument>> extractedMathPdDocumentsSources = aggregateSnippetsToSingleDocs(extractedMathPdSnippetsSources);
// write to disk
LOGGER.info("writing preprocesssed input to disk at {}", preprocessedRefsFiles
extractedMathPdDocumentsSources.writeAsFormattedText(preprocessedSourcesFiles,
new TextOutputFormat.TextFormatter<Tuple2<String, ExtractedMathPDDocument>>() {
@Override
public String format(Tuple2<String, ExtractedMathPDDocument> stringExtractedMathPDDocumentTuple2) {
return ExtractedMathPDDocumentMapper.getFormattedWritableText(stringExtractedMathPDDocumentTuple2.f1);
}
});*/
// now for the refs
final FlatMapOperator<String, Tuple2<String, ExtractedMathPDDocument>> extractedMathPdSnippetsRefs = refs.flatMap(new TextExtractorMapper());
// first, merge all pages of one doc to one doc
final DataSet<Tuple2<String, ExtractedMathPDDocument>> extractedMathPdDocumentsRefs = aggregateSnippetsToSingleDocs(extractedMathPdSnippetsRefs);
// write to disk
LOGGER.info("writing preprocesssed refs to disk at {}", preprocessedRefsFiles);
extractedMathPdDocumentsRefs.writeAsFormattedText(preprocessedRefsFiles,
new TextOutputFormat.TextFormatter<Tuple2<String, ExtractedMathPDDocument>>() {
@Override
public String format(Tuple2<String, ExtractedMathPDDocument> stringExtractedMathPDDocumentTuple2) {
LOGGER.info("input-ref {}: {}", stringExtractedMathPDDocumentTuple2.f0, stringExtractedMathPDDocumentTuple2.f1);
final String output = ExtractedMathPDDocumentMapper.getFormattedWritableText(stringExtractedMathPDDocumentTuple2.f1);
LOGGER.info("output-ref {}: {}", stringExtractedMathPDDocumentTuple2.f0, output);
final String outputB64 = Base64.getEncoder().encodeToString(output.getBytes());
LOGGER.info("output-ref {}: {}", stringExtractedMathPDDocumentTuple2.f0, outputB64);
return outputB64;
}
});
} else {
final DataSet<Tuple2<String, ExtractedMathPDDocument>> extractedMathPdDocumentsSources = readPreprocessedFiles(preprocessedSourcesFiles, env).flatMap(new ExtractedMathPDDocumentMapper());
final DataSet<Tuple2<String, ExtractedMathPDDocument>> extractedMathPdDocumentsRefs = readPreprocessedFiles(preprocessedRefsFiles, env).flatMap(new ExtractedMathPDDocumentMapper());
GroupReduceOperator<Tuple2<Tuple2<String, ExtractedMathPDDocument>, Tuple3<String, String, Double>>, Tuple2<String, ExtractedMathPDDocument>> extractedMathPDDocsWithTFIDF = null;
if (IS_MODE_TFIDF) {
//noinspection Convert2Lambda
final GroupReduceOperator<Tuple3<String, String, Double>, Tuple3<String, String, Double>> corpusWideElementFrequenciesByDimension = extractedMathPdDocumentsSources
.union(extractedMathPdDocumentsRefs)
.flatMap(new FlatMapFunction<Tuple2<String, ExtractedMathPDDocument>, Tuple3<String, String, Double>>() {
@Override
public void flatMap(Tuple2<String, ExtractedMathPDDocument> stringExtractedMathPDDocumentTuple2, Collector<Tuple3<String, String, Double>> collector) throws Exception {
final ExtractedMathPDDocument curDoc = stringExtractedMathPDDocumentTuple2.f1;
collectElementFrequencies(curDoc.getHistogramBvar(), "bvar", collector);
collectElementFrequencies(curDoc.getHistogramCi(), "ci", collector);
collectElementFrequencies(curDoc.getHistogramCn(), "cn", collector);
collectElementFrequencies(curDoc.getHistogramCsymbol(), "csymbol", collector);
}
})
.groupBy(0, 1)
.reduceGroup(new GroupReduceFunction<Tuple3<String, String, Double>, Tuple3<String, String, Double>>() {
@Override
public void reduce(Iterable<Tuple3<String, String, Double>> iterable, Collector<Tuple3<String, String, Double>> collector) throws Exception {
final HashMap<Tuple2<String, String>, Double> freqsInCorpus = new HashMap<>();
for (Tuple3<String, String, Double> i : iterable) {
final Tuple2<String, String> key = new Tuple2<>(i.f0, i.f1);
freqsInCorpus.put(key, freqsInCorpus.getOrDefault(key, 0.0) + i.f2);
}
for (Tuple2<String, String> key : freqsInCorpus.keySet()) {
collector.collect(new Tuple3<>(key.f0, key.f1, freqsInCorpus.get(key)));
}
}
});
// at this point we have in corpusWideElementFrequenciesByDimension the DF over all documents for each element in all dimensions (verified)
corpusWideElementFrequenciesByDimension.writeAsCsv(config.getOutputDir() + "_DF");
// now convert the absolute histograms into tfidf histograms
extractedMathPDDocsWithTFIDF =
extractedMathPdDocumentsSources
.cross(corpusWideElementFrequenciesByDimension)
.reduceGroup(new GroupReduceFunction<Tuple2<Tuple2<String, ExtractedMathPDDocument>, Tuple3<String, String, Double>>, Tuple2<String, ExtractedMathPDDocument>>() {
@Override
public void reduce(Iterable<Tuple2<Tuple2<String, ExtractedMathPDDocument>, Tuple3<String, String, Double>>> iterable,
Collector<Tuple2<String, ExtractedMathPDDocument>> collector) throws Exception {
final HashMap<String, ExtractedMathPDDocument> tfidfDocs = new HashMap<>();
for (Tuple2<Tuple2<String, ExtractedMathPDDocument>, Tuple3<String, String, Double>> pair : iterable) {
final ExtractedMathPDDocument curDoc = pair.f0.f1;
final String name = curDoc.getName();
final Tuple3<String, String, Double> curIDFTriple = pair.f1;
// get to tfidf doc
ExtractedMathPDDocument curTfidfDoc = tfidfDocs.get(name);
if (curTfidfDoc == null) {
curTfidfDoc = new ExtractedMathPDDocument(curDoc.title, curDoc.text);
curTfidfDoc.setName(curDoc.getName());
curTfidfDoc.setPage(curDoc.getPage());
tfidfDocs.put(name, curTfidfDoc);
}
convertAbsoluteHistogramToTFIDFHistogram(curDoc, curTfidfDoc, curIDFTriple.f0, curIDFTriple.f1, curIDFTriple.f2);
}
for (String name : tfidfDocs.keySet()) {
collector.collect(new Tuple2<>(name, tfidfDocs.get(name)));
}
}
});
}
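            // merge the source documents by key, cross them with the (similarly merged) reference documents, and emit one distance tuple per document pair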
DataSet distancesAndSectionPairs =
IS_MODE_TFIDF ? extractedMathPDDocsWithTFIDF : extractedMathPdDocumentsSources // if in TFIDF_MODE use tfidf docs, otherwise absolute frequency histogram docs
.groupBy(0)
.reduceGroup(new GroupReduceFunction<Tuple2<String, ExtractedMathPDDocument>, ExtractedMathPDDocument>() {
@Override
public void reduce(Iterable<Tuple2<String, ExtractedMathPDDocument>> iterable, Collector<ExtractedMathPDDocument> collector) throws Exception {
ExtractedMathPDDocument tmpDoc = null;
for (Tuple2<String, ExtractedMathPDDocument> i : iterable) {
if (tmpDoc == null) {
tmpDoc = i.f1;
} else {
tmpDoc.mergeOtherIntoThis(i.f1);
}
}
collector.collect(tmpDoc);
}
})
.cross(extractedMathPdDocumentsRefs
.groupBy(0)
.reduceGroup(new GroupReduceFunction<Tuple2<String, ExtractedMathPDDocument>, ExtractedMathPDDocument>() {
@Override
public void reduce(Iterable<Tuple2<String, ExtractedMathPDDocument>> iterable, Collector<ExtractedMathPDDocument> collector) throws Exception {
ExtractedMathPDDocument tmpDoc = null;
for (Tuple2<String, ExtractedMathPDDocument> i : iterable) {
if (tmpDoc == null) {
tmpDoc = i.f1;
} else {
tmpDoc.mergeOtherIntoThis(i.f1);
}
}
collector.collect(tmpDoc);
}
})
)
.reduceGroup(new GroupReduceFunction<Tuple2<ExtractedMathPDDocument, ExtractedMathPDDocument>, Tuple7<String, String, Double, Double, Double, Double, Double>>() {
@Override
public void reduce(Iterable<Tuple2<ExtractedMathPDDocument, ExtractedMathPDDocument>> iterable, Collector<Tuple7<String, String, Double, Double, Double, Double, Double>> collector) throws Exception {
for (Tuple2<ExtractedMathPDDocument, ExtractedMathPDDocument> i : iterable) {
if (i.f0 == null || i.f1 == null)
continue;
// skip one diagonal half of the matrix
//if (!i.f0.getId().contains("Original"))
// continue;
// only check Original against Plagiarism (not against other Originals)
//if (!i.f1.getId().contains("Plagiarism"))
// continue;
// Tuple4 contains (if cosine is used, the term distance actually means similarity, i.e.,
// -1=opposite, 0=unrelated, 1=same doc
// 1) total distance (accumulated distance of all others) - makes no sense in case of cosine distance
// 2) numbers
// 3) operators
// 4) identifiers
// 5) bound variables
Tuple4<Double, Double, Double, Double> distanceAllFeatures;
if (IS_MODE_TFIDF) {
distanceAllFeatures = Distances.distanceCosineAllFeatures(i.f0, i.f1);
} else {
distanceAllFeatures = Distances.distanceRelativeAllFeatures(i.f0, i.f1);
}
final Tuple7<String, String, Double, Double, Double, Double, Double> resultLine = new Tuple7<>(
i.f0.getId(),
i.f1.getId(),
Math.abs(distanceAllFeatures.f0) + Math.abs(distanceAllFeatures.f1) + Math.abs(distanceAllFeatures.f2) + Math.abs(distanceAllFeatures.f3),
distanceAllFeatures.f0,
distanceAllFeatures.f1,
distanceAllFeatures.f2,
distanceAllFeatures.f3
);
collector.collect(resultLine);
}
}
})
.sortPartition(1, Order.ASCENDING);
distancesAndSectionPairs.writeAsCsv(config.getOutputDir(), WriteMode.OVERWRITE);
// we can now use the distances and section pairs dataset to aggregate the distances on document level in distance bins
//noinspection Convert2Lambda
DataSet binnedDistancesForPairs =
distancesAndSectionPairs
.reduceGroup(new GroupReduceFunction<
Tuple7<String, String, Double, Double, Double, Double, Double>,
Tuple5<String, String, Double, Double, Double>>() {
@Override
public void reduce(Iterable<Tuple7<String, String, Double, Double, Double, Double, Double>> iterable, Collector<Tuple5<String, String, Double, Double, Double>> collector) throws Exception {
// histogram will contain as a key a tuple2 of the names of the two documents from the pair; and the bin
// the value will be the frequency of that bin in that pair of documents
final HashMap<Tuple4<String, String, Double, Double>, Double> histogramPairOfNameAndBinWithFrequency = new HashMap<>();
final HashMap<Tuple2<String, String>, Double> histogramPairOfNameWithFrequency = new HashMap<>();
for (Tuple7<String, String, Double, Double, Double, Double, Double> curPairWithDistances : iterable) {
final String id0 = curPairWithDistances.f0;
final String id1 = curPairWithDistances.f1;
final String name0 = ExtractedMathPDDocument.getNameFromId(id0);
final String name1 = ExtractedMathPDDocument.getNameFromId(id1);
double distance = curPairWithDistances.f2 / 4.0; // take the accumulated distance and normalize it
// the key3
final Tuple4<String, String, Double, Double> key =
new Tuple4<>(
name0,
name1,
getBinBoundary(distance, 0.2, true),
getBinBoundary(distance, 0.2, false));
final Tuple2<String, String> keyName = new Tuple2<String, String>(name0, name1);
// look up if something has been stored under this key
Double frequencyOfCurKey = histogramPairOfNameAndBinWithFrequency.getOrDefault(key, 0.0);
histogramPairOfNameAndBinWithFrequency.put(key, frequencyOfCurKey + 1.0);
// also update the pair's total frequency
histogramPairOfNameWithFrequency.put(keyName, histogramPairOfNameWithFrequency.getOrDefault(keyName, 0.0) + 1.0);
}
for (Tuple4<String, String, Double, Double> key : histogramPairOfNameAndBinWithFrequency.keySet()) {
collector.collect(new Tuple5<>(key.f0, key.f1, key.f2, key.f3, histogramPairOfNameAndBinWithFrequency.get(key) / histogramPairOfNameWithFrequency.get(new Tuple2<>(key.f0, key.f1))));
}
}
})
.sortPartition(0, Order.ASCENDING)
.sortPartition(1, Order.ASCENDING);
binnedDistancesForPairs.writeAsCsv(config.getOutputDir() + "_binned", WriteMode.OVERWRITE);
}
env.execute("You ad could be here! Call 4451");
}
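    // returns the lower or upper boundary of the bin of width binWidth that contains value, rounded to one decimal place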
private static double getBinBoundary(double value, double binWidth, boolean isLower) {
double flooredDivision = Math.floor(value / binWidth);
double binBoundary;
if (isLower)
binBoundary = binWidth * flooredDivision;
else
binBoundary = binWidth * (flooredDivision + 1);
return Double.valueOf(decimalFormat.format(binBoundary));
}
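    // reads the dump as one record per document by splitting on the </ARXIVFILESPLIT> delimiter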
public static DataSource<String> readWikiDump(FlinkPdCommandConfig config, ExecutionEnvironment env) {
Path filePath = new Path(config.getDataset());
TextInputFormat inp = new TextInputFormat(filePath);
inp.setCharsetName("UTF-8");
inp.setDelimiter("</ARXIVFILESPLIT>");
return env.readFile(inp, config.getDataset());
}
public static DataSource<String> readRefs(FlinkPdCommandConfig config, ExecutionEnvironment env) {
Path filePath = new Path(config.getRef());
TextInputFormat inp = new TextInputFormat(filePath);
inp.setCharsetName("UTF-8");
inp.setDelimiter("</ARXIVFILESPLIT>");
return env.readFile(inp, config.getRef());
}
public static DataSource<String> readPreprocessedFiles(String pathname, ExecutionEnvironment env) {
Path filePath = new Path(pathname);
TextInputFormat inp = new TextInputFormat(filePath);
inp.setCharsetName("UTF-8");
return env.readFile(inp, pathname);
}
public String runFromText(FlinkPdCommandConfig config, String input) throws Exception {
final JsonSerializerMapper<Object> serializerMapper = new JsonSerializerMapper<>();
return serializerMapper.map(outDocFromText(config, input));
}
public WikiDocumentOutput outDocFromText(FlinkPdCommandConfig config, String input) throws Exception {
final TextAnnotatorMapper textAnnotatorMapper = new TextAnnotatorMapper(config);
textAnnotatorMapper.open(null);
final CreateCandidatesMapper candidatesMapper = new CreateCandidatesMapper(config);
final ParsedWikiDocument parsedWikiDocument = textAnnotatorMapper.parse(input);
return candidatesMapper.map(parsedWikiDocument);
}
}
| mathosphere-core/src/main/java/com/formulasearchengine/mathosphere/mathpd/FlinkPd.java | package com.formulasearchengine.mathosphere.mathpd;
import com.formulasearchengine.mathosphere.mathpd.cli.FlinkPdCommandConfig;
import com.formulasearchengine.mathosphere.mathpd.contracts.ExtractedMathPDDocumentMapper;
import com.formulasearchengine.mathosphere.mathpd.contracts.TextExtractorMapper;
import com.formulasearchengine.mathosphere.mathpd.pojos.ExtractedMathPDDocument;
import com.formulasearchengine.mathosphere.mlp.contracts.CreateCandidatesMapper;
import com.formulasearchengine.mathosphere.mlp.contracts.JsonSerializerMapper;
import com.formulasearchengine.mathosphere.mlp.contracts.TextAnnotatorMapper;
import com.formulasearchengine.mathosphere.mlp.pojos.ParsedWikiDocument;
import com.formulasearchengine.mathosphere.mlp.pojos.WikiDocumentOutput;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.GroupReduceFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.operators.Order;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.io.TextInputFormat;
import org.apache.flink.api.java.io.TextOutputFormat;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.api.java.operators.FlatMapOperator;
import org.apache.flink.api.java.operators.GroupReduceOperator;
import org.apache.flink.api.java.tuple.*;
import org.apache.flink.core.fs.FileSystem.WriteMode;
import org.apache.flink.core.fs.Path;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.text.DecimalFormat;
import java.util.HashMap;
public class FlinkPd {
private static final Logger LOGGER = LoggerFactory.getLogger(FlinkPd.class);
private static final int NUMBER_OF_ALL_DOCS = 4; // only used in TF_IDF mode
private static final double EPSILON = 0.00000000000000000001;
private static final boolean IS_MODE_TFIDF = false; // if false, we use relative similarity
private static final boolean IS_MODE_PREPROCESSING = true;
private static DecimalFormat decimalFormat = new DecimalFormat("0.0");
public static void main(String[] args) throws Exception {
FlinkPdCommandConfig config = FlinkPdCommandConfig.from(args);
run(config);
}
//private static String generateIdPair(String id1, String id2) {
// return id1 + "-" + id2;
//}
//private static String getIdFromIdPair(String idPair, int index) {
// return idPair.split("-")[index];
//}
private static void collectElementFrequencies(HashMap<String, Double> histogramOfDimension, String dimension, Collector<Tuple3<String, String, Double>> collector) {
for (String key : histogramOfDimension.keySet()) {
//collector.collect(new Tuple3<>(dimension, key, histogramOfDimension.get(key))); // this would be the term frequency in the whole dataset,
collector.collect(new Tuple3<>(dimension, key, 1.0)); // but IDF is actually the number of documents that contain the term
}
}
private static void convertAbsoluteHistogramToTFIDFHistogram(ExtractedMathPDDocument doc, ExtractedMathPDDocument tfidfDoc, String dimensionName, String elementName, double df) {
HashMap<String, Double> histogramIn = null;
HashMap<String, Double> histogramOut = null;
switch (dimensionName) {
case "bvar":
histogramIn = doc.getHistogramBvar();
histogramOut = tfidfDoc.getHistogramBvar();
break;
case "ci":
histogramIn = doc.getHistogramCi();
histogramOut = tfidfDoc.getHistogramCi();
break;
case "cn":
histogramIn = doc.getHistogramCn();
histogramOut = tfidfDoc.getHistogramCn();
break;
case "csymbol":
histogramIn = doc.getHistogramCsymbol();
histogramOut = tfidfDoc.getHistogramCsymbol();
break;
default:
throw new RuntimeException("unknown dimension");
}
histogramOut.put(elementName, histogramOut.getOrDefault(elementName, 0.0) +
histogramIn.getOrDefault(elementName, 0.0)
* (EPSILON + Math.log(NUMBER_OF_ALL_DOCS / (df + EPSILON))));
}
/**
* This function takes math pd snippets and converts them to single documents (by merging all snippets belonging to the same document)
*
* @param extractedMathPdSnippets
* @return
*/
private static DataSet<Tuple2<String, ExtractedMathPDDocument>> aggregateSnippetsToSingleDocs(FlatMapOperator<String, Tuple2<String, ExtractedMathPDDocument>> extractedMathPdSnippets) {
DataSet<Tuple2<String, ExtractedMathPDDocument>> extractedMathPdDocuments = extractedMathPdSnippets
.groupBy(0)
.reduce(new ReduceFunction<Tuple2<String, ExtractedMathPDDocument>>() {
@Override
public Tuple2<String, ExtractedMathPDDocument> reduce(Tuple2<String, ExtractedMathPDDocument> t0, Tuple2<String, ExtractedMathPDDocument> t1) throws Exception {
t1.f1.mergeOtherIntoThis(t0.f1);
t1.f1.setText("removed");
LOGGER.info("merged {} into {}", new Object[]{t1.f0, t0.f0});
return t1;
}
});
return extractedMathPdDocuments;
}
public static void run(FlinkPdCommandConfig config) throws Exception {
final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
final String preprocessedSourcesFiles = config.getDataset() + "_preprocessed";
String preprocessedRefsFiles = config.getRef() + "_preprocessed";
if (preprocessedRefsFiles.equals(preprocessedSourcesFiles)) {
preprocessedRefsFiles += "2";
}
if (IS_MODE_PREPROCESSING) {
DataSource<String> source = readWikiDump(config, env);
DataSource<String> refs = readRefs(config, env);
/* final FlatMapOperator<String, Tuple2<String, ExtractedMathPDDocument>> extractedMathPdSnippetsSources = source.flatMap(new TextExtractorMapper());
// first, merge all pages of one doc to one doc
DataSet<Tuple2<String, ExtractedMathPDDocument>> extractedMathPdDocumentsSources = aggregateSnippetsToSingleDocs(extractedMathPdSnippetsSources);
// write to disk
LOGGER.info("writing preprocesssed input to disk at {}", preprocessedRefsFiles
extractedMathPdDocumentsSources.writeAsFormattedText(preprocessedSourcesFiles,
new TextOutputFormat.TextFormatter<Tuple2<String, ExtractedMathPDDocument>>() {
@Override
public String format(Tuple2<String, ExtractedMathPDDocument> stringExtractedMathPDDocumentTuple2) {
return ExtractedMathPDDocumentMapper.getFormattedWritableText(stringExtractedMathPDDocumentTuple2.f1);
}
});*/
// now for the refs
final FlatMapOperator<String, Tuple2<String, ExtractedMathPDDocument>> extractedMathPdSnippetsRefs = refs.flatMap(new TextExtractorMapper());
// first, merge all pages of one doc to one doc
final DataSet<Tuple2<String, ExtractedMathPDDocument>> extractedMathPdDocumentsRefs = aggregateSnippetsToSingleDocs(extractedMathPdSnippetsRefs);
// write to disk
LOGGER.info("writing preprocesssed refs to disk at {}", preprocessedRefsFiles);
extractedMathPdDocumentsRefs.writeAsFormattedText(preprocessedRefsFiles,
new TextOutputFormat.TextFormatter<Tuple2<String, ExtractedMathPDDocument>>() {
@Override
public String format(Tuple2<String, ExtractedMathPDDocument> stringExtractedMathPDDocumentTuple2) {
LOGGER.info("input-ref {}: {}", stringExtractedMathPDDocumentTuple2.f0, stringExtractedMathPDDocumentTuple2.f1);
final String output = ExtractedMathPDDocumentMapper.getFormattedWritableText(stringExtractedMathPDDocumentTuple2.f1);
LOGGER.info("output-ref {}: {}", stringExtractedMathPDDocumentTuple2.f0, output);
return output;
}
});
} else {
final DataSet<Tuple2<String, ExtractedMathPDDocument>> extractedMathPdDocumentsSources = readPreprocessedFiles(preprocessedSourcesFiles, env).flatMap(new ExtractedMathPDDocumentMapper());
final DataSet<Tuple2<String, ExtractedMathPDDocument>> extractedMathPdDocumentsRefs = readPreprocessedFiles(preprocessedRefsFiles, env).flatMap(new ExtractedMathPDDocumentMapper());
GroupReduceOperator<Tuple2<Tuple2<String, ExtractedMathPDDocument>, Tuple3<String, String, Double>>, Tuple2<String, ExtractedMathPDDocument>> extractedMathPDDocsWithTFIDF = null;
if (IS_MODE_TFIDF) {
//noinspection Convert2Lambda
final GroupReduceOperator<Tuple3<String, String, Double>, Tuple3<String, String, Double>> corpusWideElementFrequenciesByDimension = extractedMathPdDocumentsSources
.union(extractedMathPdDocumentsRefs)
.flatMap(new FlatMapFunction<Tuple2<String, ExtractedMathPDDocument>, Tuple3<String, String, Double>>() {
@Override
public void flatMap(Tuple2<String, ExtractedMathPDDocument> stringExtractedMathPDDocumentTuple2, Collector<Tuple3<String, String, Double>> collector) throws Exception {
final ExtractedMathPDDocument curDoc = stringExtractedMathPDDocumentTuple2.f1;
collectElementFrequencies(curDoc.getHistogramBvar(), "bvar", collector);
collectElementFrequencies(curDoc.getHistogramCi(), "ci", collector);
collectElementFrequencies(curDoc.getHistogramCn(), "cn", collector);
collectElementFrequencies(curDoc.getHistogramCsymbol(), "csymbol", collector);
}
})
.groupBy(0, 1)
.reduceGroup(new GroupReduceFunction<Tuple3<String, String, Double>, Tuple3<String, String, Double>>() {
@Override
public void reduce(Iterable<Tuple3<String, String, Double>> iterable, Collector<Tuple3<String, String, Double>> collector) throws Exception {
final HashMap<Tuple2<String, String>, Double> freqsInCorpus = new HashMap<>();
for (Tuple3<String, String, Double> i : iterable) {
final Tuple2<String, String> key = new Tuple2<>(i.f0, i.f1);
freqsInCorpus.put(key, freqsInCorpus.getOrDefault(key, 0.0) + i.f2);
}
for (Tuple2<String, String> key : freqsInCorpus.keySet()) {
collector.collect(new Tuple3<>(key.f0, key.f1, freqsInCorpus.get(key)));
}
}
});
// at this point we have in corpusWideElementFrequenciesByDimension the DF over all documents for each element in all dimensions (verified)
corpusWideElementFrequenciesByDimension.writeAsCsv(config.getOutputDir() + "_DF");
// now convert the absolute histograms into tfidf histograms
extractedMathPDDocsWithTFIDF =
extractedMathPdDocumentsSources
.cross(corpusWideElementFrequenciesByDimension)
.reduceGroup(new GroupReduceFunction<Tuple2<Tuple2<String, ExtractedMathPDDocument>, Tuple3<String, String, Double>>, Tuple2<String, ExtractedMathPDDocument>>() {
@Override
public void reduce(Iterable<Tuple2<Tuple2<String, ExtractedMathPDDocument>, Tuple3<String, String, Double>>> iterable,
Collector<Tuple2<String, ExtractedMathPDDocument>> collector) throws Exception {
final HashMap<String, ExtractedMathPDDocument> tfidfDocs = new HashMap<>();
for (Tuple2<Tuple2<String, ExtractedMathPDDocument>, Tuple3<String, String, Double>> pair : iterable) {
final ExtractedMathPDDocument curDoc = pair.f0.f1;
final String name = curDoc.getName();
final Tuple3<String, String, Double> curIDFTriple = pair.f1;
// get to tfidf doc
ExtractedMathPDDocument curTfidfDoc = tfidfDocs.get(name);
if (curTfidfDoc == null) {
curTfidfDoc = new ExtractedMathPDDocument(curDoc.title, curDoc.text);
curTfidfDoc.setName(curDoc.getName());
curTfidfDoc.setPage(curDoc.getPage());
tfidfDocs.put(name, curTfidfDoc);
}
convertAbsoluteHistogramToTFIDFHistogram(curDoc, curTfidfDoc, curIDFTriple.f0, curIDFTriple.f1, curIDFTriple.f2);
}
for (String name : tfidfDocs.keySet()) {
collector.collect(new Tuple2<>(name, tfidfDocs.get(name)));
}
}
});
}
DataSet distancesAndSectionPairs =
IS_MODE_TFIDF ? extractedMathPDDocsWithTFIDF : extractedMathPdDocumentsSources // if in TFIDF_MODE use tfidf docs, otherwise absolute frequency histogram docs
.groupBy(0)
.reduceGroup(new GroupReduceFunction<Tuple2<String, ExtractedMathPDDocument>, ExtractedMathPDDocument>() {
@Override
public void reduce(Iterable<Tuple2<String, ExtractedMathPDDocument>> iterable, Collector<ExtractedMathPDDocument> collector) throws Exception {
ExtractedMathPDDocument tmpDoc = null;
for (Tuple2<String, ExtractedMathPDDocument> i : iterable) {
if (tmpDoc == null) {
tmpDoc = i.f1;
} else {
tmpDoc.mergeOtherIntoThis(i.f1);
}
}
collector.collect(tmpDoc);
}
})
.cross(extractedMathPdDocumentsRefs
.groupBy(0)
.reduceGroup(new GroupReduceFunction<Tuple2<String, ExtractedMathPDDocument>, ExtractedMathPDDocument>() {
@Override
public void reduce(Iterable<Tuple2<String, ExtractedMathPDDocument>> iterable, Collector<ExtractedMathPDDocument> collector) throws Exception {
ExtractedMathPDDocument tmpDoc = null;
for (Tuple2<String, ExtractedMathPDDocument> i : iterable) {
if (tmpDoc == null) {
tmpDoc = i.f1;
} else {
tmpDoc.mergeOtherIntoThis(i.f1);
}
}
collector.collect(tmpDoc);
}
})
)
.reduceGroup(new GroupReduceFunction<Tuple2<ExtractedMathPDDocument, ExtractedMathPDDocument>, Tuple7<String, String, Double, Double, Double, Double, Double>>() {
@Override
public void reduce(Iterable<Tuple2<ExtractedMathPDDocument, ExtractedMathPDDocument>> iterable, Collector<Tuple7<String, String, Double, Double, Double, Double, Double>> collector) throws Exception {
for (Tuple2<ExtractedMathPDDocument, ExtractedMathPDDocument> i : iterable) {
if (i.f0 == null || i.f1 == null)
continue;
// skip one diagonal half of the matrix
//if (!i.f0.getId().contains("Original"))
// continue;
// only check Original against Plagiarism (not against other Originals)
//if (!i.f1.getId().contains("Plagiarism"))
// continue;
// Tuple4 contains (if cosine is used, the term distance actually means similarity, i.e.,
// -1=opposite, 0=unrelated, 1=same doc
// 1) total distance (accumulated distance of all others) - makes no sense in case of cosine distance
// 2) numbers
// 3) operators
// 4) identifiers
// 5) bound variables
Tuple4<Double, Double, Double, Double> distanceAllFeatures;
if (IS_MODE_TFIDF) {
distanceAllFeatures = Distances.distanceCosineAllFeatures(i.f0, i.f1);
} else {
distanceAllFeatures = Distances.distanceRelativeAllFeatures(i.f0, i.f1);
}
final Tuple7<String, String, Double, Double, Double, Double, Double> resultLine = new Tuple7<>(
i.f0.getId(),
i.f1.getId(),
Math.abs(distanceAllFeatures.f0) + Math.abs(distanceAllFeatures.f1) + Math.abs(distanceAllFeatures.f2) + Math.abs(distanceAllFeatures.f3),
distanceAllFeatures.f0,
distanceAllFeatures.f1,
distanceAllFeatures.f2,
distanceAllFeatures.f3
);
collector.collect(resultLine);
}
}
})
.sortPartition(1, Order.ASCENDING);
distancesAndSectionPairs.writeAsCsv(config.getOutputDir(), WriteMode.OVERWRITE);
// we can now use the distances and section pairs dataset to aggregate the distances on document level in distance bins
//noinspection Convert2Lambda
DataSet binnedDistancesForPairs =
distancesAndSectionPairs
.reduceGroup(new GroupReduceFunction<
Tuple7<String, String, Double, Double, Double, Double, Double>,
Tuple5<String, String, Double, Double, Double>>() {
@Override
public void reduce(Iterable<Tuple7<String, String, Double, Double, Double, Double, Double>> iterable, Collector<Tuple5<String, String, Double, Double, Double>> collector) throws Exception {
// histogram will contain as a key a tuple2 of the names of the two documents from the pair; and the bin
// the value will be the frequency of that bin in that pair of documents
final HashMap<Tuple4<String, String, Double, Double>, Double> histogramPairOfNameAndBinWithFrequency = new HashMap<>();
final HashMap<Tuple2<String, String>, Double> histogramPairOfNameWithFrequency = new HashMap<>();
for (Tuple7<String, String, Double, Double, Double, Double, Double> curPairWithDistances : iterable) {
final String id0 = curPairWithDistances.f0;
final String id1 = curPairWithDistances.f1;
final String name0 = ExtractedMathPDDocument.getNameFromId(id0);
final String name1 = ExtractedMathPDDocument.getNameFromId(id1);
double distance = curPairWithDistances.f2 / 4.0; // take the accumulated distance and normalize it
// the key3
final Tuple4<String, String, Double, Double> key =
new Tuple4<>(
name0,
name1,
getBinBoundary(distance, 0.2, true),
getBinBoundary(distance, 0.2, false));
final Tuple2<String, String> keyName = new Tuple2<String, String>(name0, name1);
// look up if something has been stored under this key
Double frequencyOfCurKey = histogramPairOfNameAndBinWithFrequency.getOrDefault(key, 0.0);
histogramPairOfNameAndBinWithFrequency.put(key, frequencyOfCurKey + 1.0);
// also update the pair's total frequency
histogramPairOfNameWithFrequency.put(keyName, histogramPairOfNameWithFrequency.getOrDefault(keyName, 0.0) + 1.0);
}
for (Tuple4<String, String, Double, Double> key : histogramPairOfNameAndBinWithFrequency.keySet()) {
collector.collect(new Tuple5<>(key.f0, key.f1, key.f2, key.f3, histogramPairOfNameAndBinWithFrequency.get(key) / histogramPairOfNameWithFrequency.get(new Tuple2<>(key.f0, key.f1))));
}
}
})
.sortPartition(0, Order.ASCENDING)
.sortPartition(1, Order.ASCENDING);
binnedDistancesForPairs.writeAsCsv(config.getOutputDir() + "_binned", WriteMode.OVERWRITE);
}
env.execute("You ad could be here! Call 4451");
}
private static double getBinBoundary(double value, double binWidth, boolean isLower) {
double flooredDivision = Math.floor(value / binWidth);
double binBoundary;
if (isLower)
binBoundary = binWidth * flooredDivision;
else
binBoundary = binWidth * (flooredDivision + 1);
return Double.valueOf(decimalFormat.format(binBoundary));
}
public static DataSource<String> readWikiDump(FlinkPdCommandConfig config, ExecutionEnvironment env) {
Path filePath = new Path(config.getDataset());
TextInputFormat inp = new TextInputFormat(filePath);
inp.setCharsetName("UTF-8");
inp.setDelimiter("</ARXIVFILESPLIT>");
return env.readFile(inp, config.getDataset());
}
public static DataSource<String> readRefs(FlinkPdCommandConfig config, ExecutionEnvironment env) {
Path filePath = new Path(config.getRef());
TextInputFormat inp = new TextInputFormat(filePath);
inp.setCharsetName("UTF-8");
inp.setDelimiter("</ARXIVFILESPLIT>");
return env.readFile(inp, config.getRef());
}
public static DataSource<String> readPreprocessedFiles(String pathname, ExecutionEnvironment env) {
Path filePath = new Path(pathname);
TextInputFormat inp = new TextInputFormat(filePath);
inp.setCharsetName("UTF-8");
return env.readFile(inp, pathname);
}
public String runFromText(FlinkPdCommandConfig config, String input) throws Exception {
final JsonSerializerMapper<Object> serializerMapper = new JsonSerializerMapper<>();
return serializerMapper.map(outDocFromText(config, input));
}
public WikiDocumentOutput outDocFromText(FlinkPdCommandConfig config, String input) throws Exception {
final TextAnnotatorMapper textAnnotatorMapper = new TextAnnotatorMapper(config);
textAnnotatorMapper.open(null);
final CreateCandidatesMapper candidatesMapper = new CreateCandidatesMapper(config);
final ParsedWikiDocument parsedWikiDocument = textAnnotatorMapper.parse(input);
return candidatesMapper.map(parsedWikiDocument);
}
}
| base64 writer
| mathosphere-core/src/main/java/com/formulasearchengine/mathosphere/mathpd/FlinkPd.java | base64 writer | <ide><path>athosphere-core/src/main/java/com/formulasearchengine/mathosphere/mathpd/FlinkPd.java
<ide> import org.slf4j.LoggerFactory;
<ide>
<ide> import java.text.DecimalFormat;
<add>import java.util.Base64;
<ide> import java.util.HashMap;
<ide>
<ide> public class FlinkPd {
<ide> LOGGER.info("input-ref {}: {}", stringExtractedMathPDDocumentTuple2.f0, stringExtractedMathPDDocumentTuple2.f1);
<ide> final String output = ExtractedMathPDDocumentMapper.getFormattedWritableText(stringExtractedMathPDDocumentTuple2.f1);
<ide> LOGGER.info("output-ref {}: {}", stringExtractedMathPDDocumentTuple2.f0, output);
<del> return output;
<add> final String outputB64 = Base64.getEncoder().encodeToString(output.getBytes());
<add> LOGGER.info("output-ref {}: {}", stringExtractedMathPDDocumentTuple2.f0, outputB64);
<add> return outputB64;
<ide> }
<ide> });
<ide> |
|
Java | mit | f4c79fbe7355df63c7ca3f12a17a23378b7c6c17 | 0 | simonkro/do_mysql,simonkro/do_mysql,simonkro/do_mysql | package do_oracle;
import java.sql.Connection;
import java.sql.Statement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Properties;
import oracle.jdbc.OraclePreparedStatement;
import oracle.jdbc.OracleTypes;
import java.sql.ParameterMetaData;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.jruby.Ruby;
import org.jruby.runtime.builtin.IRubyObject;
import data_objects.RubyType;
import data_objects.drivers.AbstractDriverDefinition;
public class OracleDriverDefinition extends AbstractDriverDefinition {
public final static String URI_SCHEME = "oracle";
// . will be replaced with : in Connection.java before connection
public final static String JDBC_URI_SCHEME = "oracle.thin";
public final static String RUBY_MODULE_NAME = "Oracle";
public OracleDriverDefinition() {
super(URI_SCHEME, JDBC_URI_SCHEME, RUBY_MODULE_NAME);
}
@Override
public void setPreparedStatementParam(PreparedStatement ps,
IRubyObject arg, int idx) throws SQLException {
switch (RubyType.getRubyType(arg.getType().getName())) {
case NIL:
// XXX ps.getParameterMetaData().getParameterType(idx) produces
// com.mysql.jdbc.ResultSetMetaData:397:in `getField': java.lang.NullPointerException
// from com.mysql.jdbc.ResultSetMetaData:275:in `getColumnType'
ps.setNull(idx, Types.NULL);
break;
default:
super.setPreparedStatementParam(ps, arg, idx);
}
}
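    // detects "INSERT ... RETURNING ... INTO" statements and registers a BIGINT return parameter so the generated key can be read back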
@Override
public boolean registerPreparedStatementReturnParam(String sqlText, PreparedStatement ps, int idx) throws SQLException {
OraclePreparedStatement ops = (OraclePreparedStatement) ps;
Pattern p = Pattern.compile("^\\s*INSERT.+RETURNING.+INTO\\s+", Pattern.CASE_INSENSITIVE);
Matcher m = p.matcher(sqlText);
if (m.find()) {
ops.registerReturnParameter(idx, Types.BIGINT);
return true;
}
return false;
}
@Override
public long getPreparedStatementReturnParam(PreparedStatement ps) throws SQLException {
OraclePreparedStatement ops = (OraclePreparedStatement) ps;
ResultSet rs = ops.getReturnResultSet();
try {
if (rs.next()) {
// Assuming that primary key will not be larger as long max value
return rs.getLong(1);
}
return 0;
} finally {
try {
rs.close();
} catch (Exception e) {}
}
}
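    // rewrites the :insert_id placeholder to a standard JDBC bind marker ("?")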
@Override
public String prepareSqlTextForPs(String sqlText, IRubyObject[] args) {
String newSqlText = sqlText.replaceFirst(":insert_id", "?");
return newSqlText;
}
@Override
public boolean supportsJdbcGeneratedKeys()
{
return false;
}
@Override
public boolean supportsJdbcScrollableResultSets() {
// when set to true then getDouble and getBigDecimal is failing on BINARY_DOUBLE and BINARY_FLOAT columns
return false;
}
@Override
public boolean supportsConnectionEncodings()
{
return false;
}
@Override
public Properties getDefaultConnectionProperties() {
Properties props = new Properties();
// Set prefetch rows to 100 to increase fetching performance SELECTs with many rows
props.put("defaultRowPrefetch", "100");
// TODO: should clarify if this is needed for faster performance
// props.put("SetFloatAndDoubleUseBinary", "true");
return props;
}
@Override
public void afterConnectionCallback(Connection conn)
throws SQLException {
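        // pin the session NLS formats so date and timestamp values are exchanged in a predictable literal format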
exec(conn, "alter session set nls_date_format = 'YYYY-MM-DD HH24:MI:SS'");
exec(conn, "alter session set nls_timestamp_format = 'YYYY-MM-DD HH24:MI:SS.FF'");
exec(conn, "alter session set nls_timestamp_tz_format = 'YYYY-MM-DD HH24:MI:SS.FF TZH:TZM'");
}
@Override
public String toString(PreparedStatement ps) {
try {
String sqlText = ((oracle.jdbc.driver.OracleStatement) ps).getOriginalSql();
// ParameterMetaData md = ps.getParameterMetaData();
return sqlText;
} catch (SQLException sqle) {
return "(exception in getOriginalSql)";
}
}
// for execution of session initialization SQL statements
private void exec(Connection conn, String sql)
throws SQLException {
Statement s = null;
try {
s = conn.createStatement();
s.execute(sql);
} finally {
if (s != null) {
try {
s.close();
} catch (SQLException sqle2) {
}
}
}
}
}
| do_oracle/ext-java/src/main/java/do_oracle/OracleDriverDefinition.java | package do_oracle;
import java.sql.Connection;
import java.sql.Statement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Properties;
import oracle.jdbc.OraclePreparedStatement;
import oracle.jdbc.OracleTypes;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.jruby.Ruby;
import org.jruby.runtime.builtin.IRubyObject;
import data_objects.RubyType;
import data_objects.drivers.AbstractDriverDefinition;
public class OracleDriverDefinition extends AbstractDriverDefinition {
public final static String URI_SCHEME = "oracle";
// . will be replaced with : in Connection.java before connection
public final static String JDBC_URI_SCHEME = "oracle.thin";
public final static String RUBY_MODULE_NAME = "Oracle";
public OracleDriverDefinition() {
super(URI_SCHEME, JDBC_URI_SCHEME, RUBY_MODULE_NAME);
}
@Override
public void setPreparedStatementParam(PreparedStatement ps,
IRubyObject arg, int idx) throws SQLException {
switch (RubyType.getRubyType(arg.getType().getName())) {
case NIL:
// XXX ps.getParameterMetaData().getParameterType(idx) produces
// com.mysql.jdbc.ResultSetMetaData:397:in `getField': java.lang.NullPointerException
// from com.mysql.jdbc.ResultSetMetaData:275:in `getColumnType'
ps.setNull(idx, Types.NULL);
break;
default:
super.setPreparedStatementParam(ps, arg, idx);
}
}
@Override
public boolean registerPreparedStatementReturnParam(String sqlText, PreparedStatement ps, int idx) throws SQLException {
OraclePreparedStatement ops = (OraclePreparedStatement) ps;
Pattern p = Pattern.compile("^\\s*INSERT.+RETURNING.+INTO\\s+", Pattern.CASE_INSENSITIVE);
Matcher m = p.matcher(sqlText);
if (m.find()) {
ops.registerReturnParameter(idx, Types.BIGINT);
return true;
}
return false;
}
@Override
public long getPreparedStatementReturnParam(PreparedStatement ps) throws SQLException {
OraclePreparedStatement ops = (OraclePreparedStatement) ps;
ResultSet rs = ops.getReturnResultSet();
try {
if (rs.next()) {
// Assuming that primary key will not be larger as long max value
return rs.getLong(1);
}
return 0;
} finally {
try {
rs.close();
} catch (Exception e) {}
}
}
@Override
public String prepareSqlTextForPs(String sqlText, IRubyObject[] args) {
String newSqlText = sqlText.replaceFirst(":insert_id", "?");
return newSqlText;
}
@Override
public boolean supportsJdbcGeneratedKeys()
{
return false;
}
@Override
public boolean supportsJdbcScrollableResultSets() {
return true;
}
@Override
public boolean supportsConnectionEncodings()
{
return false;
}
@Override
public Properties getDefaultConnectionProperties() {
Properties props = new Properties();
// Set prefetch rows to 100 to increase fetching performance SELECTs with many rows
props.put("defaultRowPrefetch", "100");
return props;
}
@Override
public void afterConnectionCallback(Connection conn)
throws SQLException {
exec(conn, "alter session set nls_date_format = 'YYYY-MM-DD HH24:MI:SS'");
exec(conn, "alter session set nls_timestamp_format = 'YYYY-MM-DD HH24:MI:SS.FF'");
exec(conn, "alter session set nls_timestamp_tz_format = 'YYYY-MM-DD HH24:MI:SS.FF TZH:TZM'");
}
// for execution of session initialization SQL statements
private void exec(Connection conn, String sql)
throws SQLException {
Statement s = null;
try {
s = conn.createStatement();
s.execute(sql);
} finally {
if (s != null) {
try {
s.close();
} catch (SQLException sqle2) {
}
}
}
}
}
| [do_oracle] JDBC driver - log SQL statements, do not use scrollable ResultSet
| do_oracle/ext-java/src/main/java/do_oracle/OracleDriverDefinition.java | [do_oracle] JDBC driver - log SQL statements, do not use scrollable ResultSet | <ide><path>o_oracle/ext-java/src/main/java/do_oracle/OracleDriverDefinition.java
<ide> import java.util.Properties;
<ide> import oracle.jdbc.OraclePreparedStatement;
<ide> import oracle.jdbc.OracleTypes;
<del>
<add>import java.sql.ParameterMetaData;
<add>
<ide> import java.util.regex.Matcher;
<ide> import java.util.regex.Pattern;
<ide>
<ide>
<ide> @Override
<ide> public boolean supportsJdbcScrollableResultSets() {
<del> return true;
<add> // when set to true then getDouble and getBigDecimal is failing on BINARY_DOUBLE and BINARY_FLOAT columns
<add> return false;
<ide> }
<ide>
<ide> @Override
<ide> Properties props = new Properties();
<ide> // Set prefetch rows to 100 to increase fetching performance SELECTs with many rows
<ide> props.put("defaultRowPrefetch", "100");
<add> // TODO: should clarify if this is needed for faster performance
<add> // props.put("SetFloatAndDoubleUseBinary", "true");
<ide> return props;
<ide> }
<ide>
<ide> exec(conn, "alter session set nls_date_format = 'YYYY-MM-DD HH24:MI:SS'");
<ide> exec(conn, "alter session set nls_timestamp_format = 'YYYY-MM-DD HH24:MI:SS.FF'");
<ide> exec(conn, "alter session set nls_timestamp_tz_format = 'YYYY-MM-DD HH24:MI:SS.FF TZH:TZM'");
<add> }
<add>
<add> @Override
<add> public String toString(PreparedStatement ps) {
<add> try {
<add> String sqlText = ((oracle.jdbc.driver.OracleStatement) ps).getOriginalSql();
<add> // ParameterMetaData md = ps.getParameterMetaData();
<add> return sqlText;
<add> } catch (SQLException sqle) {
<add> return "(exception in getOriginalSql)";
<add> }
<ide> }
<ide>
<ide> // for execution of session initialization SQL statements |
|
Java | apache-2.0 | c63480d00f3690d43843317b988f89279c0aaab5 | 0 | digipost/signature-api-specification | /**
* Copyright (C) Posten Norge AS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package no.digipost.signature.api.xml;
public interface XMLDocument {
String getTitle();
String getDescription();
XMLHref getHref();
String getMime();
}
| jaxb/src/main/java/no/digipost/signature/api/xml/XMLDocument.java | /**
* Copyright (C) Posten Norge AS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package no.digipost.signature.api.xml;
public interface XMLDocument {
String getTitle();
XMLHref getHref();
String getMime();
}
| Readd description for xml doc
| jaxb/src/main/java/no/digipost/signature/api/xml/XMLDocument.java | Readd description for xml doc | <ide><path>axb/src/main/java/no/digipost/signature/api/xml/XMLDocument.java
<ide> public interface XMLDocument {
<ide>
<ide> String getTitle();
<add> String getDescription();
<ide> XMLHref getHref();
<ide> String getMime();
<ide> |
|
Java | mit | 253e2a48ce8867f53753ad6616d0898aea0b83f3 | 0 | heineman/algorithms-nutshell-2ed,heineman/algorithms-nutshell-2ed,heineman/algorithms-nutshell-2ed,heineman/algorithms-nutshell-2ed,heineman/algorithms-nutshell-2ed,heineman/algorithms-nutshell-2ed,heineman/algorithms-nutshell-2ed | package algs.model.tests.convexhull;
import org.junit.Test;
import algs.model.IPoint;
import algs.model.data.Generator;
import algs.model.data.points.UniformGenerator;
import algs.model.twod.TwoDPoint;
import junit.framework.TestCase;
public class ComparativeHullTest extends TestCase {
@Test
public void testConstruction() {
int n = 256;
int numThreads = 2;
Generator<IPoint> g = new UniformGenerator();
IPoint[] master = g.generate(n);
// make copy to keep original in its shape.
IPoint[] points = new IPoint[master.length];
for (int i = 0; i < master.length; i++) {
points[i] = new TwoDPoint(master[i]);
}
// compute natively.
IPoint[] hull1 = new algs.model.problems.convexhull.andrew.ConvexHullScan().compute(points);
// compute multithread.
IPoint[] hull1a = new algs.model.problems.convexhull.parallel.ConvexHullScan(numThreads).compute(points);
// compute heuristic (single thread)
IPoint[] reduced = algs.model.problems.convexhull.AklToussaint.reduce(points);
IPoint[] hull2 = new algs.model.problems.convexhull.andrew.ConvexHullScan().compute(reduced);
// compute full Parallel Heuristic (both multi-thread)
IPoint[] reduced2a = algs.model.problems.convexhull.AklToussaint.reduce(points);
IPoint[] hull2a = new algs.model.problems.convexhull.parallel.ConvexHullScan(numThreads).compute(reduced2a);
// sanity check.
assertEquals (hull1.length, hull2.length);
assertEquals (hull1a.length, hull1.length);
assertEquals (hull2a.length, hull2.length);
assertEquals (reduced2a.length, reduced.length);
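// A stricter variant of the sanity check above could compare the hull point sets directly,
// e.g. (illustrative sketch only; assumes IPoint provides value-based equals/hashCode and
// that java.util.Arrays and java.util.HashSet are imported):
//   assertEquals(new HashSet<IPoint>(Arrays.asList(hull1)), new HashSet<IPoint>(Arrays.asList(hull1a)));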
}
@Test
public void testParallel() {
int n = 256;
Generator<IPoint> g = new UniformGenerator();
IPoint[] master = g.generate(n);
// make copy to keep original in its shape.
IPoint[] points = new IPoint[master.length];
IPoint[] points2 = new IPoint[master.length];
for (int i = 0; i < master.length; i++) {
points[i] = new TwoDPoint(master[i]);
points2[i] = new TwoDPoint(master[i]);
}
// compute heuristic (single thread)
IPoint[] reduced = algs.model.problems.convexhull.AklToussaint.reduce(points);
// compute multithread.
IPoint[] reduced2 = algs.model.problems.convexhull.parallel.AklToussaint.reduce(points2);
// fix this!
assertEquals (reduced2.length, reduced.length);
}
}
| Tests/tests/algs/model/tests/convexhull/ComparativeHullTest.java | package algs.model.tests.convexhull;
import org.junit.Test;
import algs.model.IPoint;
import algs.model.data.Generator;
import algs.model.data.points.UniformGenerator;
import algs.model.twod.TwoDPoint;
import junit.framework.TestCase;
public class ComparativeHullTest extends TestCase {
@Test
public void testConstruction() {
int n = 256;
int numThreads = 2;
Generator<IPoint> g = new UniformGenerator();
IPoint[] master = g.generate(n);
// make copy to keep original in its shape.
IPoint[] points = new IPoint[master.length];
for (int i = 0; i < master.length; i++) {
points[i] = new TwoDPoint(master[i]);
}
// compute natively.
IPoint[] hull1 = new algs.model.problems.convexhull.andrew.ConvexHullScan().compute(points);
// compute multithread.
IPoint[] hull1a = new algs.model.problems.convexhull.parallel.ConvexHullScan(numThreads).compute(points);
// compute heuristic (single thread)
IPoint[] reduced = algs.model.problems.convexhull.AklToussaint.reduce(points);
IPoint[] hull2 = new algs.model.problems.convexhull.andrew.ConvexHullScan().compute(reduced);
// compute full Parallel Heuristic (both multi-thread)
IPoint[] reduced2a = algs.model.problems.convexhull.AklToussaint.reduce(points);
IPoint[] hull2a = new algs.model.problems.convexhull.parallel.ConvexHullScan(numThreads).compute(reduced2a);
// sanity check.
assertEquals (hull1.length, hull2.length);
assertEquals (hull1a.length, hull1.length);
assertEquals (hull2a.length, hull2.length);
assertEquals (reduced2a.length, reduced.length);
}
@Test
public void testParallel() {
int n = 256;
Generator<IPoint> g = new UniformGenerator();
IPoint[] master = g.generate(n);
// make copy to keep original in its shape.
IPoint[] points = new IPoint[master.length];
IPoint[] points2 = new IPoint[master.length];
for (int i = 0; i < master.length; i++) {
points[i] = new TwoDPoint(master[i]);
points2[i] = new TwoDPoint(master[i]);
}
// compute heuristic (single thread)
IPoint[] reduced = algs.model.problems.convexhull.AklToussaint.reduce(points);
// compute multithread.
IPoint[] reduced2 = algs.model.problems.convexhull.parallel.AklToussaint.reduce(points2);
assertEquals (reduced2.length, reduced.length);
}
}
| Fixed parallel Akl-Toussaint heuristic implementation | Tests/tests/algs/model/tests/convexhull/ComparativeHullTest.java | Fixed parallel Akl-Toussaint heuristic implementation | <ide><path>ests/tests/algs/model/tests/convexhull/ComparativeHullTest.java
<ide> points[i] = new TwoDPoint(master[i]);
<ide> points2[i] = new TwoDPoint(master[i]);
<ide> }
<add>
<ide> // compute heuristic (single thread)
<ide> IPoint[] reduced = algs.model.problems.convexhull.AklToussaint.reduce(points);
<ide>
<ide> // compute multithread.
<ide> IPoint[] reduced2 = algs.model.problems.convexhull.parallel.AklToussaint.reduce(points2);
<ide>
<del>
<add> // fix this!
<ide> assertEquals (reduced2.length, reduced.length);
<ide> }
<ide> } |
|
Java | apache-2.0 | 39eb0e0b520c8c9c50022c85b223efc57515a6dc | 0 | jvd10/apollo,ApolloDev/apollo,ApolloDev/apollo,jvd10/apollo,jvd10/apollo,jvd10/apollo,ApolloDev/apollo,ApolloDev/apollo | /* Copyright 2012 University of Pittsburgh
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package edu.pitt.apollo.apolloclient;
import java.io.File;
import java.math.BigInteger;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Calendar;
import java.util.GregorianCalendar;
import javax.xml.datatype.DatatypeConfigurationException;
import javax.xml.datatype.DatatypeFactory;
import javax.xml.datatype.XMLGregorianCalendar;
import javax.xml.namespace.QName;
import edu.pitt.apollo.service.apolloservice.v2_0.ApolloServiceEI;
import edu.pitt.apollo.service.apolloservice.v2_0.ApolloServiceV20;
import edu.pitt.apollo.types.v2_0.ApolloPathogenCode;
import edu.pitt.apollo.types.v2_0.ApolloSoftwareType;
import edu.pitt.apollo.types.v2_0.Authentication;
import edu.pitt.apollo.types.v2_0.ControlStrategyTargetPopulationsAndPrioritization;
import edu.pitt.apollo.types.v2_0.FixedStartTime;
import edu.pitt.apollo.types.v2_0.IndividualTreatmentControlStrategy;
import edu.pitt.apollo.types.v2_0.Infection;
import edu.pitt.apollo.types.v2_0.InfectionAcquisition;
import edu.pitt.apollo.types.v2_0.InfectionState;
import edu.pitt.apollo.types.v2_0.InfectiousDisease;
import edu.pitt.apollo.types.v2_0.InfectiousDiseaseScenario;
import edu.pitt.apollo.types.v2_0.Location;
import edu.pitt.apollo.types.v2_0.LocationDefinition;
import edu.pitt.apollo.types.v2_0.MethodCallStatus;
import edu.pitt.apollo.types.v2_0.MethodCallStatusEnum;
import edu.pitt.apollo.types.v2_0.NumericParameterValue;
import edu.pitt.apollo.types.v2_0.PopulationInfectionAndImmunityCensus;
import edu.pitt.apollo.types.v2_0.PopulationInfectionAndImmunityCensusData;
import edu.pitt.apollo.types.v2_0.PopulationInfectionAndImmunityCensusDataCell;
import edu.pitt.apollo.types.v2_0.ProbabilisticParameterValue;
import edu.pitt.apollo.types.v2_0.RunAndSoftwareIdentification;
import edu.pitt.apollo.types.v2_0.RunSimulationMessage;
import edu.pitt.apollo.types.v2_0.RunVisualizationMessage;
import edu.pitt.apollo.types.v2_0.SimulatorTimeSpecification;
import edu.pitt.apollo.types.v2_0.SoftwareIdentification;
import edu.pitt.apollo.types.v2_0.TimeStepUnit;
import edu.pitt.apollo.types.v2_0.UnitOfMeasure;
import edu.pitt.apollo.types.v2_0.UrlOutputResource;
import edu.pitt.apollo.types.v2_0.Vaccination;
import edu.pitt.apollo.types.v2_0.VaccinationEfficacyForSimulatorConfiguration;
import edu.pitt.apollo.types.v2_0.VaccinationPreventableOutcome;
import edu.pitt.apollo.types.v2_0.Vaccine;
import edu.pitt.apollo.types.v2_0.VisualizationOptions;
import edu.pitt.apollo.types.v2_0.VisualizerResult;
public class WSClient {
public static final String WSDL_LOC = "http://research.rods.pitt.edu/apolloservice2.0/services/apolloservice?wsdl";
private ApolloServiceEI port;
private static final QName SERVICE_NAME = new QName(
"http://service.apollo.pitt.edu/apolloservice/v2_0/",
"ApolloService_v2.0");
private WSClient(URL wsdlURL) {
ApolloServiceV20 ss = new ApolloServiceV20(wsdlURL, SERVICE_NAME);
port = ss.getApolloServiceEndpoint();
}
public SoftwareIdentification getSoftwareIdentificationForSimulator() {
SoftwareIdentification softwareId = new SoftwareIdentification();
softwareId.setSoftwareDeveloper("UPitt,PSC,CMU");
softwareId.setSoftwareName("FRED");
softwareId.setSoftwareVersion("2.0.1_i");
softwareId.setSoftwareType(ApolloSoftwareType.SIMULATOR);
return softwareId;
}
private SoftwareIdentification getSoftwareIdentifiationForTimeSeriesVisualizer() {
SoftwareIdentification softwareId = new SoftwareIdentification();
softwareId.setSoftwareName("Image Visualizer"); // rename this
// timeseries or
// something!
softwareId.setSoftwareType(ApolloSoftwareType.VISUALIZER);
softwareId.setSoftwareVersion("1.0");
softwareId.setSoftwareDeveloper("UPitt");
return softwareId;
}
private SoftwareIdentification getSoftwareIdentifiationForGaia() {
SoftwareIdentification softwareId = new SoftwareIdentification();
softwareId.setSoftwareName("GAIA");
softwareId.setSoftwareType(ApolloSoftwareType.VISUALIZER);
softwareId.setSoftwareVersion("1.0");
softwareId.setSoftwareDeveloper("PSC");
return softwareId;
}
public Authentication getAuthentication() {
Authentication authentication = new Authentication();
authentication.setRequesterId("TutorialUser");
authentication.setRequesterPassword("TutorialPassword");
return authentication;
}
public SimulatorTimeSpecification getSimulatorTimeSpecification() {
SimulatorTimeSpecification timeSpec = new SimulatorTimeSpecification();
// the run length of the simulation is 90 days
timeSpec.setRunLength(new BigInteger("90"));
timeSpec.setTimeStepUnit(TimeStepUnit.DAY);
timeSpec.setTimeStepValue(1.0);
return timeSpec;
}
private PopulationInfectionAndImmunityCensus getPopulationInfectionAndImmunityCensus() {
PopulationInfectionAndImmunityCensus census = new PopulationInfectionAndImmunityCensus();
census.setDescription("Population of Allegheny County, Pennsylvania");
GregorianCalendar calendar = new GregorianCalendar();
calendar.set(Calendar.YEAR, 2009);
calendar.set(Calendar.MONTH, Calendar.SEPTEMBER);
calendar.set(Calendar.DAY_OF_MONTH, 1);
calendar.set(Calendar.HOUR, 0);
calendar.set(Calendar.MINUTE, 0);
calendar.set(Calendar.SECOND, 0);
calendar.set(Calendar.MILLISECOND, 0);
XMLGregorianCalendar censusDate = null;
try {
censusDate = DatatypeFactory.newInstance().newXMLGregorianCalendar(
calendar);
} catch (DatatypeConfigurationException e) {
System.out.println("Error! Unable to set date, error was:"
+ e.getMessage());
System.exit(-1);
}
census.setDate(censusDate);
Location location = new Location();
location.setLocationCode("42003");
census.setLocation(location);
census.setPopulationSpecies("9606"); // homo sapiens
ApolloPathogenCode pathId = new ApolloPathogenCode();
pathId.setGisrnCladeName("H1N1");
pathId.setNcbiTaxonId("114727"); // Influenza A subtype H1N1
census.setPathogen(pathId);
PopulationInfectionAndImmunityCensusData data = new PopulationInfectionAndImmunityCensusData();
data.setLocation(location);
PopulationInfectionAndImmunityCensusDataCell susceptibleCell = new PopulationInfectionAndImmunityCensusDataCell();
susceptibleCell.setInfectionState(InfectionState.SUSCEPTIBLE);
susceptibleCell.setFractionInInfectionState(0.8);
PopulationInfectionAndImmunityCensusDataCell exposedCell = new PopulationInfectionAndImmunityCensusDataCell();
exposedCell.setInfectionState(InfectionState.EXPOSED);
exposedCell.setFractionInInfectionState(0.0);
PopulationInfectionAndImmunityCensusDataCell infectiousCell = new PopulationInfectionAndImmunityCensusDataCell();
infectiousCell.setInfectionState(InfectionState.INFECTIOUS);
infectiousCell.setFractionInInfectionState(0.05);
PopulationInfectionAndImmunityCensusDataCell recoveredCell = new PopulationInfectionAndImmunityCensusDataCell();
recoveredCell.setInfectionState(InfectionState.RECOVERED);
recoveredCell.setFractionInInfectionState(0.15);
data.getCensusDataCells().add(susceptibleCell);
data.getCensusDataCells().add(exposedCell);
data.getCensusDataCells().add(infectiousCell);
data.getCensusDataCells().add(recoveredCell);
census.setCensusData(data);
return census;
}
private InfectiousDisease getInfectiousDisease() {
InfectiousDisease disease = new InfectiousDisease();
disease.setDiseaseID("H1N1");
disease.setSpeciesWithDisease("9606"); // homo sapiens
ApolloPathogenCode pathId = new ApolloPathogenCode();
pathId.setGisrnCladeName("H1N1");
pathId.setNcbiTaxonId("114727"); // Influenza A subtype H1N1
disease.setCausalPathogen(pathId);
return disease;
}
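// getInfection below describes H1N1 (NCBI taxon 114727) in humans (taxon 9606) with a
// 2-day latent period, a 6-day infectious period, and a basic reproduction number of 1.3.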
private Infection getInfection() {
Infection infection = new Infection();
ApolloPathogenCode pathId = new ApolloPathogenCode();
pathId.setGisrnCladeName("H1N1");
pathId.setNcbiTaxonId("114727"); // Influenza A subtype H1N1
infection.setPathogenTaxonID(pathId);
infection.setHostTaxonID("9606"); // homo sapiens
NumericParameterValue infectiousPeriod = new NumericParameterValue();
infectiousPeriod.setUnitOfMeasure(UnitOfMeasure.DAYS);
infectiousPeriod.setValue(6.0);
infection.setInfectiousPeriodDuration(infectiousPeriod);
NumericParameterValue latentPeriod = new NumericParameterValue();
latentPeriod.setUnitOfMeasure(UnitOfMeasure.DAYS);
latentPeriod.setValue(2.0);
infection.setLatentPeriodDuration(latentPeriod);
InfectionAcquisition infectionAcquisition = new InfectionAcquisition();
infectionAcquisition.setPathogenTaxonID(pathId);
infectionAcquisition.setSusceptibleHostTaxonID("9606"); // homo sapiens
infectionAcquisition.setBasicReproductionNumber(1.3);
infection.getInfectionAcquisition().add(infectionAcquisition);
return infection;
}
private InfectiousDiseaseScenario getInfectiousDiseaseScenario() {
InfectiousDiseaseScenario scenario = new InfectiousDiseaseScenario();
LocationDefinition definition = new LocationDefinition();
definition.setDescription("Allegheny County, Pennsylvania");
// set the scenario location to Allegheny County
Location location = new Location();
location.setLocationCode("42003");
scenario.setLocation(location);
// set the scenario date to 2009/09/01
GregorianCalendar calendar = new GregorianCalendar();
calendar.set(Calendar.YEAR, 2009);
calendar.set(Calendar.MONTH, Calendar.SEPTEMBER);
calendar.set(Calendar.DAY_OF_MONTH, 1);
calendar.set(Calendar.HOUR, 0);
calendar.set(Calendar.MINUTE, 0);
calendar.set(Calendar.SECOND, 0);
calendar.set(Calendar.MILLISECOND, 0);
// translate from Java style Calendar to an XML compatible calendar
XMLGregorianCalendar scenarioDate = null;
try {
scenarioDate = DatatypeFactory.newInstance()
.newXMLGregorianCalendar(calendar);
} catch (DatatypeConfigurationException e) {
System.out.println("Error! Unable to set date, error was:"
+ e.getMessage());
System.exit(-1);
}
scenario.setScenarioDate(scenarioDate);
scenario.getInfections().add(getInfection());
scenario.getDiseases().add(getInfectiousDisease());
scenario.getPopulationInfectionAndImmunityCensuses().add(
getPopulationInfectionAndImmunityCensus());
return scenario;
}
public String callRunSimulation() {
RunSimulationMessage message = new RunSimulationMessage();
message.setInfectiousDiseaseScenario(getInfectiousDiseaseScenario());
message.setAuthentication(getAuthentication());
message.setSimulatorIdentification(getSoftwareIdentificationForSimulator());
message.setSimulatorTimeSpecification(getSimulatorTimeSpecification());
message.getInfectiousDiseaseScenario().getInfectiousDiseaseControlStrategies().add(getVaccinationControlStrategy());
return port.runSimulation(message);
}
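// checkStatusOfWebServiceCall below first waits 10 seconds for the run to launch, then polls
// getRunStatus every 20 seconds until a terminal state (COMPLETED, FAILED, or an
// authentication/authorization failure) is reported.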
public MethodCallStatus checkStatusOfWebServiceCall(
RunAndSoftwareIdentification runAndSoftwareId) {
// give the simulator a chance to launch the simulation
try {
Thread.sleep(10000);
} catch (InterruptedException e1) {
// this is acceptable
}
while (true) {
MethodCallStatus status = port.getRunStatus(runAndSoftwareId);
switch (status.getStatus()) {
case AUTHENTICATION_FAILURE:
case UNAUTHORIZED:
System.out
.println("No authorization for this run! Error message is:"
+ status.getMessage());
return status;
case COMPLETED:
System.out.println("Run completed!");
return status;
case FAILED:
System.out.println("Run Failed! Error message is:"
+ status.getMessage());
return status;
case RUNNING:
case MOVING:
case QUEUED:
case HELD:
case EXITING:
case WAITING:
System.out.println("The "
+ runAndSoftwareId.getSoftwareId().getSoftwareName()
+ " run is active (" + status.getStatus().toString()
+ "). The status message is: " + status.getMessage());
try {
Thread.sleep(20000);
} catch (InterruptedException e) {
}
}
}
}
private void getResourcesFromVisualizer(String simulatorRunId,
SoftwareIdentification visualizerSoftwareIdentification) {
System.out.println("Visualizing runId" + simulatorRunId + " using the "
+ visualizerSoftwareIdentification.getSoftwareName()
+ " visualizer...");
RunVisualizationMessage runVisualizationMessage = new RunVisualizationMessage();
VisualizationOptions options = new VisualizationOptions();
options.setRunId(simulatorRunId);
options.setLocation("42003");
options.setOutputFormat("default");
runVisualizationMessage.setVisualizationOptions(options);
runVisualizationMessage
.setVisualizerIdentification(visualizerSoftwareIdentification);
Authentication auth = new Authentication();
auth.setRequesterId("TutorialUser");
auth.setRequesterPassword("TutorialPassword");
runVisualizationMessage.setAuthentication(auth);
VisualizerResult visualizerResult = port
.runVisualization(runVisualizationMessage);
String visualizationRunId = visualizerResult.getRunId();
RunAndSoftwareIdentification visualizationRunAndSoftwareId = new RunAndSoftwareIdentification();
visualizationRunAndSoftwareId.setRunId(visualizationRunId);
visualizationRunAndSoftwareId
.setSoftwareId(visualizerSoftwareIdentification);
if (checkStatusOfWebServiceCall(visualizationRunAndSoftwareId)
.getStatus() == MethodCallStatusEnum.COMPLETED) {
System.out
.println("The following resources were returned from the "
+ visualizerSoftwareIdentification
.getSoftwareName() + " visualizer:");
for (UrlOutputResource r : visualizerResult
.getVisualizerOutputResource()) {
System.out.println("\t" + r.getURL());
}
}
}
private Vaccination getVaccination() {
Vaccination vacc = new Vaccination();
vacc.setDescription("H1N1 Vaccine");
vacc.setNumDosesInTreatmentCourse(new BigInteger("1"));
vacc.setSpeciesOfTreatedOrganisms("Homo sapiens");
vacc.getTreatmentContraindications();
Vaccine vaccine = new Vaccine();
vaccine.setDescription("Influenza A (H1N1) 2009 Monovalent Vaccine");
vacc.setVaccine(vaccine);
VaccinationEfficacyForSimulatorConfiguration vesc = new VaccinationEfficacyForSimulatorConfiguration();
ApolloPathogenCode strain = new ApolloPathogenCode();
strain.setNcbiTaxonId("114727");
// strain.setGisrnCladeName("A/(H3N2) Victoria/361//2011-like");
vesc.setStrainIdentifier(strain);
vesc.setForVaccinationPreventableOutcome(VaccinationPreventableOutcome.INFECTION);
// vesc.setTreatment(vacc);
// vesc.setTreatment(t);
vesc.setVaccineIdentifier("Influenza A (H1N1) 2009 Monovalent Vaccine");
vesc.setAverageVaccinationEfficacy(0.47);
vesc.setDescription("The vaccination efficacy for the Influenza A (H1N1) 2009 Monovalent Vaccine");
vacc.getVaccinationEfficacies().add(vesc);
return vacc;
}
private ProbabilisticParameterValue getControlStrategyCompilance() {
ProbabilisticParameterValue compliance = new ProbabilisticParameterValue();
compliance.setValue(0.5);
return compliance;
}
private ControlStrategyTargetPopulationsAndPrioritization getTargetPopulationsAndPrioritizations() {
ControlStrategyTargetPopulationsAndPrioritization targetPopulationsAndPrioritization = new ControlStrategyTargetPopulationsAndPrioritization();
targetPopulationsAndPrioritization
.setControlStrategyNamedPrioritizationScheme("ACIP");
return targetPopulationsAndPrioritization;
}
private NumericParameterValue getResponseDelay() {
NumericParameterValue responseDelay = new NumericParameterValue();
responseDelay.setUnitOfMeasure(UnitOfMeasure.DAYS);
responseDelay.setValue(0d);
return responseDelay;
}
private FixedStartTime getFixedStartTime() {
FixedStartTime fixedStartTime = new FixedStartTime();
fixedStartTime.setStartTimeRelativeToScenarioDate(new BigInteger("0"));
fixedStartTime.setStopTimeRelativeToScenarioDate(new BigInteger("90"));
return fixedStartTime;
}
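// getVaccinationControlStrategy below assembles an example strategy: 50% compliance, the
// ACIP prioritization scheme, no response delay, active from day 0 through day 90, with a
// supply and administration capacity of 3500 doses per day for 90 days.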
private IndividualTreatmentControlStrategy getVaccinationControlStrategy() {
IndividualTreatmentControlStrategy vaccinationControlMeasure = new IndividualTreatmentControlStrategy();
vaccinationControlMeasure.setControlStrategyCompliance(getControlStrategyCompilance());
vaccinationControlMeasure.setControlStrategyReactiveEndPointFraction(1.0);
vaccinationControlMeasure.setControlStrategyResponseDelay(getResponseDelay());
vaccinationControlMeasure.setControlStrategyStartTime(getFixedStartTime());
vaccinationControlMeasure.setDescription("An example vaccination control strategy.");
vaccinationControlMeasure.setIndividualTreatment(getVaccination());
vaccinationControlMeasure.setTargetPopulationsAndPrioritizations(getTargetPopulationsAndPrioritizations());
for (int i = 0; i < 90; i++)
vaccinationControlMeasure.getSupplySchedule().add(new BigInteger("3500"));
for (int i = 0; i < 90; i++)
vaccinationControlMeasure.getAdministrationCapacity().add(new BigInteger("3500"));
return vaccinationControlMeasure;
}
public static void main(String args[]) throws java.lang.Exception {
URL wsdlURL = ApolloServiceV20.WSDL_LOCATION;
if (args.length > 0 && args[0] != null && !"".equals(args[0])) {
File wsdlFile = new File(args[0]);
try {
if (wsdlFile.exists()) {
wsdlURL = wsdlFile.toURI().toURL();
} else {
wsdlURL = new URL(args[0]);
}
} catch (MalformedURLException e) {
e.printStackTrace();
}
}
WSClient client = new WSClient(
wsdlURL);
String simulationRunId = client.callRunSimulation();
System.out.println("The simulator returned a runId of "
+ simulationRunId);
RunAndSoftwareIdentification runAndSoftwareId = new RunAndSoftwareIdentification();
runAndSoftwareId.setSoftwareId(client
.getSoftwareIdentificationForSimulator());
runAndSoftwareId.setRunId(simulationRunId);
MethodCallStatus status = client
.checkStatusOfWebServiceCall(runAndSoftwareId);
if (status.getStatus() == MethodCallStatusEnum.COMPLETED) {
client.getResourcesFromVisualizer(simulationRunId,
client.getSoftwareIdentifiationForTimeSeriesVisualizer());
client.getResourcesFromVisualizer(simulationRunId,
client.getSoftwareIdentifiationForGaia());
}
}
}
| apollo-ws/service-skeletons/java/trunk/apollo-service-client/src/main/java/edu/pitt/apollo/apolloclient/WSClient.java | /* Copyright 2012 University of Pittsburgh
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package edu.pitt.apollo.apolloclient;
import java.io.File;
import java.math.BigInteger;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.List;
import javax.xml.datatype.DatatypeConfigurationException;
import javax.xml.datatype.DatatypeFactory;
import javax.xml.datatype.XMLGregorianCalendar;
import javax.xml.namespace.QName;
import edu.pitt.apollo.service.apolloservice.v2_0.ApolloServiceEI;
import edu.pitt.apollo.service.apolloservice.v2_0.ApolloServiceV20;
import edu.pitt.apollo.types.v2_0.ApolloPathogenCode;
import edu.pitt.apollo.types.v2_0.ApolloSoftwareType;
import edu.pitt.apollo.types.v2_0.Authentication;
import edu.pitt.apollo.types.v2_0.GetPopulationAndEnvironmentCensusResult;
import edu.pitt.apollo.types.v2_0.GetScenarioLocationCodesSupportedBySimulatorResult;
import edu.pitt.apollo.types.v2_0.Infection;
import edu.pitt.apollo.types.v2_0.InfectionAcquisition;
import edu.pitt.apollo.types.v2_0.InfectionState;
import edu.pitt.apollo.types.v2_0.InfectiousDisease;
import edu.pitt.apollo.types.v2_0.InfectiousDiseaseScenario;
import edu.pitt.apollo.types.v2_0.Location;
import edu.pitt.apollo.types.v2_0.LocationDefinition;
import edu.pitt.apollo.types.v2_0.MethodCallStatus;
import edu.pitt.apollo.types.v2_0.MethodCallStatusEnum;
import edu.pitt.apollo.types.v2_0.NumericParameterValue;
import edu.pitt.apollo.types.v2_0.PopulationAndEnvironmentCensus;
import edu.pitt.apollo.types.v2_0.PopulationInfectionAndImmunityCensus;
import edu.pitt.apollo.types.v2_0.PopulationInfectionAndImmunityCensusData;
import edu.pitt.apollo.types.v2_0.PopulationInfectionAndImmunityCensusDataCell;
import edu.pitt.apollo.types.v2_0.RunAndSoftwareIdentification;
import edu.pitt.apollo.types.v2_0.RunSimulationMessage;
import edu.pitt.apollo.types.v2_0.RunVisualizationMessage;
import edu.pitt.apollo.types.v2_0.SimulatorTimeSpecification;
import edu.pitt.apollo.types.v2_0.SoftwareIdentification;
import edu.pitt.apollo.types.v2_0.TimeStepUnit;
import edu.pitt.apollo.types.v2_0.UnitOfMeasure;
import edu.pitt.apollo.types.v2_0.UrlOutputResource;
import edu.pitt.apollo.types.v2_0.VisualizationOptions;
import edu.pitt.apollo.types.v2_0.VisualizerResult;
public class WSClient {
public static final String WSDL_LOC = "http://research.rods.pitt.edu/apolloservice2.0/services/apolloservice?wsdl";
private ApolloServiceEI port;
private static final QName SERVICE_NAME = new QName(
"http://service.apollo.pitt.edu/apolloservice/v2_0/",
"ApolloService_v2.0");
private WSClient(URL wsdlURL) {
ApolloServiceV20 ss = new ApolloServiceV20(wsdlURL, SERVICE_NAME);
port = ss.getApolloServiceEndpoint();
}
public SoftwareIdentification getSoftwareIdentificationForFred() {
SoftwareIdentification softwareId = new SoftwareIdentification();
softwareId.setSoftwareDeveloper("UPitt,PSC,CMU");
softwareId.setSoftwareName("FRED");
softwareId.setSoftwareVersion("2.0.1_i");
softwareId.setSoftwareType(ApolloSoftwareType.SIMULATOR);
return softwareId;
}
private SoftwareIdentification getSoftwareIdentifiationForTimeSeriesVisualizer() {
SoftwareIdentification softwareId = new SoftwareIdentification();
softwareId.setSoftwareName("Image Visualizer"); // rename this
// timeseries or
// something!
softwareId.setSoftwareType(ApolloSoftwareType.VISUALIZER);
softwareId.setSoftwareVersion("1.0");
softwareId.setSoftwareDeveloper("UPitt");
return softwareId;
}
private SoftwareIdentification getSoftwareIdentifiationForGaia() {
SoftwareIdentification softwareId = new SoftwareIdentification();
softwareId.setSoftwareName("GAIA");
softwareId.setSoftwareType(ApolloSoftwareType.VISUALIZER);
softwareId.setSoftwareVersion("1.0");
softwareId.setSoftwareDeveloper("PSC");
return softwareId;
}
public Authentication getAuthentication() {
Authentication authentication = new Authentication();
authentication.setRequesterId("TutorialUser");
authentication.setRequesterPassword("TutorialPassword");
return authentication;
}
public SimulatorTimeSpecification getSimulatorTimeSpecification() {
SimulatorTimeSpecification timeSpec = new SimulatorTimeSpecification();
// the run length of the simulation is 90 days
timeSpec.setRunLength(new BigInteger("90"));
timeSpec.setTimeStepUnit(TimeStepUnit.DAY);
timeSpec.setTimeStepValue(1.0);
return timeSpec;
}
private PopulationInfectionAndImmunityCensus getPopulationInfectionAndImmunityCensus() {
PopulationInfectionAndImmunityCensus census = new PopulationInfectionAndImmunityCensus();
census.setDescription("Population of Allegheny County, Pennsylvania");
GregorianCalendar calendar = new GregorianCalendar();
XMLGregorianCalendar censusDate = null;
try {
censusDate = DatatypeFactory.newInstance().newXMLGregorianCalendar(
calendar);
} catch (DatatypeConfigurationException e) {
System.out.println("Error! Unable to set date, error was:"
+ e.getMessage());
System.exit(-1);
}
census.setDate(censusDate);
Location location = new Location();
location.setLocationCode("42003");
census.setLocation(location);
census.setPopulationSpecies("9606"); // homo sapiens
ApolloPathogenCode pathId = new ApolloPathogenCode();
pathId.setGisrnCladeName("H1N1");
pathId.setNcbiTaxonId("114727"); // Influenza A subtype H1N1
census.setPathogen(pathId);
PopulationInfectionAndImmunityCensusData data = new PopulationInfectionAndImmunityCensusData();
data.setLocation(location);
PopulationInfectionAndImmunityCensusDataCell susceptibleCell = new PopulationInfectionAndImmunityCensusDataCell();
susceptibleCell.setInfectionState(InfectionState.SUSCEPTIBLE);
susceptibleCell.setFractionInInfectionState(0.8);
PopulationInfectionAndImmunityCensusDataCell exposedCell = new PopulationInfectionAndImmunityCensusDataCell();
exposedCell.setInfectionState(InfectionState.EXPOSED);
exposedCell.setFractionInInfectionState(0.0);
PopulationInfectionAndImmunityCensusDataCell infectiousCell = new PopulationInfectionAndImmunityCensusDataCell();
infectiousCell.setInfectionState(InfectionState.INFECTIOUS);
infectiousCell.setFractionInInfectionState(0.05);
PopulationInfectionAndImmunityCensusDataCell recoveredCell = new PopulationInfectionAndImmunityCensusDataCell();
recoveredCell.setInfectionState(InfectionState.RECOVERED);
recoveredCell.setFractionInInfectionState(0.15);
data.getCensusDataCells().add(susceptibleCell);
data.getCensusDataCells().add(exposedCell);
data.getCensusDataCells().add(infectiousCell);
data.getCensusDataCells().add(recoveredCell);
census.setCensusData(data);
return census;
}
private InfectiousDisease getInfectiousDisease() {
InfectiousDisease disease = new InfectiousDisease();
disease.setDiseaseID("H1N1");
disease.setSpeciesWithDisease("9606"); // homo sapiens
ApolloPathogenCode pathId = new ApolloPathogenCode();
pathId.setGisrnCladeName("H1N1");
pathId.setNcbiTaxonId("114727"); // Influenza A subtype H1N1
disease.setCausalPathogen(pathId);
return disease;
}
private Infection getInfection() {
Infection infection = new Infection();
ApolloPathogenCode pathId = new ApolloPathogenCode();
pathId.setGisrnCladeName("H1N1");
pathId.setNcbiTaxonId("114727"); // Influenza A subtype H1N1
infection.setPathogenTaxonID(pathId);
infection.setHostTaxonID("9606"); // homo sapiens
NumericParameterValue infectiousPeriod = new NumericParameterValue();
infectiousPeriod.setUnitOfMeasure(UnitOfMeasure.DAYS);
infectiousPeriod.setValue(6.0);
infection.setInfectiousPeriodDuration(infectiousPeriod);
NumericParameterValue latentPeriod = new NumericParameterValue();
latentPeriod.setUnitOfMeasure(UnitOfMeasure.DAYS);
latentPeriod.setValue(2.0);
infection.setLatentPeriodDuration(latentPeriod);
InfectionAcquisition infectionAcquisition = new InfectionAcquisition();
infectionAcquisition.setPathogenTaxonID(pathId);
infectionAcquisition.setSusceptibleHostTaxonID("9606"); // homo sapiens
infectionAcquisition.setBasicReproductionNumber(1.3);
infection.getInfectionAcquisition().add(infectionAcquisition);
return infection;
}
private InfectiousDiseaseScenario getInfectiousDiseaseScenario() {
InfectiousDiseaseScenario scenario = new InfectiousDiseaseScenario();
LocationDefinition definition = new LocationDefinition();
definition.setDescription("Allegheny County, Pennsylvania");
// set the scenario location to Allegheny County
Location location = new Location();
location.setLocationCode("42003");
scenario.setLocation(location);
// set the scenario date to 2009/09/01
GregorianCalendar calendar = new GregorianCalendar();
calendar.set(Calendar.YEAR, 2009);
calendar.set(Calendar.MONTH, Calendar.SEPTEMBER);
calendar.set(Calendar.DAY_OF_MONTH, 1);
calendar.set(Calendar.HOUR, 0);
calendar.set(Calendar.MINUTE, 0);
calendar.set(Calendar.SECOND, 0);
calendar.set(Calendar.MILLISECOND, 0);
// translate from Java style Calendar to an XML compatible calendar
XMLGregorianCalendar scenarioDate = null;
try {
scenarioDate = DatatypeFactory.newInstance()
.newXMLGregorianCalendar(calendar);
} catch (DatatypeConfigurationException e) {
System.out.println("Error! Unable to set date, error was:"
+ e.getMessage());
System.exit(-1);
}
scenario.setScenarioDate(scenarioDate);
scenario.getInfections().add(getInfection());
scenario.getDiseases().add(getInfectiousDisease());
scenario.getPopulationInfectionAndImmunityCensuses().add(
getPopulationInfectionAndImmunityCensus());
return scenario;
}
public String callRunSimulation() {
RunSimulationMessage message = new RunSimulationMessage();
message.setAuthentication(getAuthentication());
message.setSimulatorIdentification(getSoftwareIdentificationForFred());
message.setSimulatorTimeSpecification(getSimulatorTimeSpecification());
message.setInfectiousDiseaseScenario(getInfectiousDiseaseScenario());
return port.runSimulation(message);
}
public MethodCallStatus checkStatusOfWebServiceCall(
RunAndSoftwareIdentification runAndSoftwareId) {
// give the simulator a chance to launch the simulation
try {
Thread.sleep(10000);
} catch (InterruptedException e1) {
//this is acceptable
}
while (true) {
MethodCallStatus status = port.getRunStatus(runAndSoftwareId);
switch (status.getStatus()) {
case AUTHENTICATION_FAILURE:
case UNAUTHORIZED:
System.out
.println("We weren't authorized for this run! Error message is:"
+ status.getMessage());
return status;
case COMPLETED:
System.out.println("Run completed!");
return status;
case FAILED:
System.out.println("Run Failed! Error message is:"
+ status.getMessage());
return status;
case RUNNING:
case MOVING:
case QUEUED:
case HELD:
case EXITING:
case WAITING:
System.out.println("The "
+ runAndSoftwareId.getSoftwareId().getSoftwareName()
+ " run is active (" + status.getStatus().toString()
+ "). The status message is: " + status.getMessage());
try {
Thread.sleep(20000);
} catch (InterruptedException e) {
}
}
}
}
private void getResourcesFromVisualizer(String simulatorRunId,
SoftwareIdentification visualizerSoftwareIdentification) {
System.out.println("Visualizing runId" + simulatorRunId + " using the "
+ visualizerSoftwareIdentification.getSoftwareName()
+ " visualizer.");
RunVisualizationMessage runVisualizationMessage = new RunVisualizationMessage();
VisualizationOptions options = new VisualizationOptions();
options.setRunId(simulatorRunId);
runVisualizationMessage.setVisualizationOptions(options);
runVisualizationMessage
.setVisualizerIdentification(visualizerSoftwareIdentification);
Authentication auth = new Authentication();
auth.setRequesterId("TutorialUser");
auth.setRequesterPassword("TutorialPassword");
runVisualizationMessage.setAuthentication(auth);
VisualizerResult visualizerResult = port
.runVisualization(runVisualizationMessage);
String visualizationRunId = visualizerResult.getRunId();
RunAndSoftwareIdentification visualizationRunAndSoftwareId = new RunAndSoftwareIdentification();
visualizationRunAndSoftwareId.setRunId(visualizationRunId);
visualizationRunAndSoftwareId
.setSoftwareId(visualizerSoftwareIdentification);
if (checkStatusOfWebServiceCall(visualizationRunAndSoftwareId)
.getStatus() == MethodCallStatusEnum.COMPLETED) {
System.out
.println("The following resources were returned from the "
+ visualizerSoftwareIdentification
.getSoftwareName() + " visualizer:");
for (UrlOutputResource r : visualizerResult
.getVisualizerOutputResource()) {
System.out.println("\t" + r.getURL());
}
}
}
//TODO: move this out of here
public void testPopulationAndEnvironmentCensuses(ApolloServiceEI port) {
SoftwareIdentification id = getSoftwareIdentificationForFred();
GetScenarioLocationCodesSupportedBySimulatorResult result = port.getScenarioLocationCodesSupportedBySimulator(id);
List<String> locationCodes = result.getLocationCodes();
if (!locationCodes.contains("42")) {
throw new RuntimeException("Location codes did not contain code 42");
}
if (!locationCodes.contains("42003")) {
throw new RuntimeException("Location codes did not contain code 42003");
}
GetPopulationAndEnvironmentCensusResult censusResult = port.getPopulationAndEnvironmentCensus(id, "42");
PopulationAndEnvironmentCensus census = censusResult.getPopulationAndEnvironmentCensus();
System.out.println("Location code 42");
System.out.println("NameOfAdministrativeUnit: " + census.getNameOfAdministativeUnit());
System.out.println("NumberOfPeople: " + census.getNumberOfPeople());
System.out.println("NumberOfSchools: " + census.getNumberOfSchools());
System.out.println("NumberOfWorkplaces: " + census.getNumberOfWorkplaces());
System.out.println("Number of sublocations: " + census.getSubLocationCensuses().size());
System.out.println();
// List<PopulationAndEnvironmentCensus> subCensuses = census.getSubLocationCensuses();
// //confirm working that subtree is built
censusResult = port.getPopulationAndEnvironmentCensus(id, "42003");
census = censusResult.getPopulationAndEnvironmentCensus();
System.out.println("Location code 42003");
System.out.println("NameOfAdministrativeUnit: " + census.getNameOfAdministativeUnit());
System.out.println("NumberOfPeople: " + census.getNumberOfPeople());
System.out.println("NumberOfSchools: " + census.getNumberOfSchools());
System.out.println("NumberOfWorkplaces: " + census.getNumberOfWorkplaces());
System.out.println("Number of sublocations: " + census.getSubLocationCensuses().size());
// //confirm working no subtree but good info
//
}
public static void main(String args[]) throws java.lang.Exception {
URL wsdlURL = new URL(WSClient.WSDL_LOC);
WSClient client = new WSClient(
wsdlURL);
String simulationRunId = client.callRunSimulation();
System.out.println("The simulator returned a runId of "
+ simulationRunId);
RunAndSoftwareIdentification runAndSoftwareId = new RunAndSoftwareIdentification();
runAndSoftwareId.setSoftwareId(client
.getSoftwareIdentificationForFred());
runAndSoftwareId.setRunId(simulationRunId);
MethodCallStatus status = client
.checkStatusOfWebServiceCall(runAndSoftwareId);
if (status.getStatus() == MethodCallStatusEnum.COMPLETED) {
client.getResourcesFromVisualizer(simulationRunId,
client.getSoftwareIdentifiationForTimeSeriesVisualizer());
client.getResourcesFromVisualizer(simulationRunId,
client.getSoftwareIdentifiationForGaia());
}
}
}
| Added code to generate a vaccination control strategy
| apollo-ws/service-skeletons/java/trunk/apollo-service-client/src/main/java/edu/pitt/apollo/apolloclient/WSClient.java | Added code to generate a vaccination control strategy | <ide><path>pollo-ws/service-skeletons/java/trunk/apollo-service-client/src/main/java/edu/pitt/apollo/apolloclient/WSClient.java
<ide> import java.net.URL;
<ide> import java.util.Calendar;
<ide> import java.util.GregorianCalendar;
<del>import java.util.List;
<ide>
<ide> import javax.xml.datatype.DatatypeConfigurationException;
<ide> import javax.xml.datatype.DatatypeFactory;
<ide> import edu.pitt.apollo.types.v2_0.ApolloPathogenCode;
<ide> import edu.pitt.apollo.types.v2_0.ApolloSoftwareType;
<ide> import edu.pitt.apollo.types.v2_0.Authentication;
<del>import edu.pitt.apollo.types.v2_0.GetPopulationAndEnvironmentCensusResult;
<del>import edu.pitt.apollo.types.v2_0.GetScenarioLocationCodesSupportedBySimulatorResult;
<add>import edu.pitt.apollo.types.v2_0.ControlStrategyTargetPopulationsAndPrioritization;
<add>import edu.pitt.apollo.types.v2_0.FixedStartTime;
<add>import edu.pitt.apollo.types.v2_0.IndividualTreatmentControlStrategy;
<ide> import edu.pitt.apollo.types.v2_0.Infection;
<ide> import edu.pitt.apollo.types.v2_0.InfectionAcquisition;
<ide> import edu.pitt.apollo.types.v2_0.InfectionState;
<ide> import edu.pitt.apollo.types.v2_0.MethodCallStatus;
<ide> import edu.pitt.apollo.types.v2_0.MethodCallStatusEnum;
<ide> import edu.pitt.apollo.types.v2_0.NumericParameterValue;
<del>import edu.pitt.apollo.types.v2_0.PopulationAndEnvironmentCensus;
<ide> import edu.pitt.apollo.types.v2_0.PopulationInfectionAndImmunityCensus;
<ide> import edu.pitt.apollo.types.v2_0.PopulationInfectionAndImmunityCensusData;
<ide> import edu.pitt.apollo.types.v2_0.PopulationInfectionAndImmunityCensusDataCell;
<add>import edu.pitt.apollo.types.v2_0.ProbabilisticParameterValue;
<ide> import edu.pitt.apollo.types.v2_0.RunAndSoftwareIdentification;
<ide> import edu.pitt.apollo.types.v2_0.RunSimulationMessage;
<ide> import edu.pitt.apollo.types.v2_0.RunVisualizationMessage;
<ide> import edu.pitt.apollo.types.v2_0.TimeStepUnit;
<ide> import edu.pitt.apollo.types.v2_0.UnitOfMeasure;
<ide> import edu.pitt.apollo.types.v2_0.UrlOutputResource;
<add>import edu.pitt.apollo.types.v2_0.Vaccination;
<add>import edu.pitt.apollo.types.v2_0.VaccinationEfficacyForSimulatorConfiguration;
<add>import edu.pitt.apollo.types.v2_0.VaccinationPreventableOutcome;
<add>import edu.pitt.apollo.types.v2_0.Vaccine;
<ide> import edu.pitt.apollo.types.v2_0.VisualizationOptions;
<ide> import edu.pitt.apollo.types.v2_0.VisualizerResult;
<ide>
<ide> port = ss.getApolloServiceEndpoint();
<ide> }
<ide>
<del> public SoftwareIdentification getSoftwareIdentificationForFred() {
<add> public SoftwareIdentification getSoftwareIdentificationForSimulator() {
<ide> SoftwareIdentification softwareId = new SoftwareIdentification();
<ide> softwareId.setSoftwareDeveloper("UPitt,PSC,CMU");
<ide> softwareId.setSoftwareName("FRED");
<ide> private PopulationInfectionAndImmunityCensus getPopulationInfectionAndImmunityCensus() {
<ide> PopulationInfectionAndImmunityCensus census = new PopulationInfectionAndImmunityCensus();
<ide> census.setDescription("Population of Allegheny County, Pennsylvania");
<add>
<ide> GregorianCalendar calendar = new GregorianCalendar();
<add> calendar.set(Calendar.YEAR, 2009);
<add> calendar.set(Calendar.MONTH, Calendar.SEPTEMBER);
<add> calendar.set(Calendar.DAY_OF_MONTH, 1);
<add> calendar.set(Calendar.HOUR, 0);
<add> calendar.set(Calendar.MINUTE, 0);
<add> calendar.set(Calendar.SECOND, 0);
<add> calendar.set(Calendar.MILLISECOND, 0);
<add>
<ide> XMLGregorianCalendar censusDate = null;
<ide> try {
<ide> censusDate = DatatypeFactory.newInstance().newXMLGregorianCalendar(
<ide> + e.getMessage());
<ide> System.exit(-1);
<ide> }
<del>
<ide> census.setDate(censusDate);
<ide>
<ide> Location location = new Location();
<ide> data.getCensusDataCells().add(recoveredCell);
<ide>
<ide> census.setCensusData(data);
<del>
<ide> return census;
<ide> }
<ide>
<ide>
<ide> public String callRunSimulation() {
<ide> RunSimulationMessage message = new RunSimulationMessage();
<add> message.setInfectiousDiseaseScenario(getInfectiousDiseaseScenario());
<ide> message.setAuthentication(getAuthentication());
<del> message.setSimulatorIdentification(getSoftwareIdentificationForFred());
<add> message.setSimulatorIdentification(getSoftwareIdentificationForSimulator());
<ide> message.setSimulatorTimeSpecification(getSimulatorTimeSpecification());
<del> message.setInfectiousDiseaseScenario(getInfectiousDiseaseScenario());
<add> message.getInfectiousDiseaseScenario().getInfectiousDiseaseControlStrategies().add(getVaccinationControlStrategy());
<ide> return port.runSimulation(message);
<ide> }
<ide>
<ide> try {
<ide> Thread.sleep(10000);
<ide> } catch (InterruptedException e1) {
<del> //this is acceptable
<add> // this is acceptable
<ide> }
<ide> while (true) {
<ide> MethodCallStatus status = port.getRunStatus(runAndSoftwareId);
<ide> case AUTHENTICATION_FAILURE:
<ide> case UNAUTHORIZED:
<ide> System.out
<del> .println("We weren't authorized for this run! Error message is:"
<add> .println("No authorization for this run! Error message is:"
<ide> + status.getMessage());
<ide> return status;
<ide> case COMPLETED:
<ide> SoftwareIdentification visualizerSoftwareIdentification) {
<ide> System.out.println("Visualizing runId" + simulatorRunId + " using the "
<ide> + visualizerSoftwareIdentification.getSoftwareName()
<del> + " visualizer.");
<add> + " visualizer...");
<ide>
<ide> RunVisualizationMessage runVisualizationMessage = new RunVisualizationMessage();
<ide>
<ide> VisualizationOptions options = new VisualizationOptions();
<ide> options.setRunId(simulatorRunId);
<add> options.setLocation("42003");
<add> options.setOutputFormat("default");
<ide> runVisualizationMessage.setVisualizationOptions(options);
<ide>
<ide> runVisualizationMessage
<ide> }
<ide>
<ide> }
<del>
<del> //TODO: move this out of here
<del> public void testPopulationAndEnvironmentCensuses(ApolloServiceEI port) {
<del>
<del> SoftwareIdentification id = getSoftwareIdentificationForFred();
<del> GetScenarioLocationCodesSupportedBySimulatorResult result = port.getScenarioLocationCodesSupportedBySimulator(id);
<del> List<String> locationCodes = result.getLocationCodes();
<del>
<del> if (!locationCodes.contains("42")) {
<del> throw new RuntimeException("Location codes did not contain code 42");
<del> }
<del> if (!locationCodes.contains("42003")) {
<del> throw new RuntimeException("Location codes did not contain code 42003");
<del> }
<del> GetPopulationAndEnvironmentCensusResult censusResult = port.getPopulationAndEnvironmentCensus(id, "42");
<del> PopulationAndEnvironmentCensus census = censusResult.getPopulationAndEnvironmentCensus();
<del> System.out.println("Location code 42");
<del> System.out.println("NameOfAdministrativeUnit: " + census.getNameOfAdministativeUnit());
<del> System.out.println("NumberOfPeople: " + census.getNumberOfPeople());
<del> System.out.println("NumberOfSchools: " + census.getNumberOfSchools());
<del> System.out.println("NumberOfWorkplaces: " + census.getNumberOfWorkplaces());
<del> System.out.println("Number of sublocations: " + census.getSubLocationCensuses().size());
<del> System.out.println();
<del>// List<PopulationAndEnvironmentCensus> subCensuses = census.getSubLocationCensuses();
<del>
<del> // //confirm working that subtree is built
<del> censusResult = port.getPopulationAndEnvironmentCensus(id, "42003");
<del> census = censusResult.getPopulationAndEnvironmentCensus();
<del> System.out.println("Location code 42003");
<del> System.out.println("NameOfAdministrativeUnit: " + census.getNameOfAdministativeUnit());
<del> System.out.println("NumberOfPeople: " + census.getNumberOfPeople());
<del> System.out.println("NumberOfSchools: " + census.getNumberOfSchools());
<del> System.out.println("NumberOfWorkplaces: " + census.getNumberOfWorkplaces());
<del> System.out.println("Number of sublocations: " + census.getSubLocationCensuses().size());
<del> // //confirm working no subtree but good info
<del> //
<del>
<del> }
<del>
<add>
<add> private Vaccination getVaccination() {
<add> Vaccination vacc = new Vaccination();
<add> vacc.setDescription("H1N1 Vaccine");
<add> vacc.setNumDosesInTreatmentCourse(new BigInteger("1"));
<add> vacc.setSpeciesOfTreatedOrganisms("Homo sapiens");
<add> vacc.getTreatmentContraindications();
<add>
<add> Vaccine vaccine = new Vaccine();
<add> vaccine.setDescription("Influenza A (H1N1) 2009 Monovalent Vaccine");
<add> vacc.setVaccine(vaccine);
<add>
<add> VaccinationEfficacyForSimulatorConfiguration vesc = new VaccinationEfficacyForSimulatorConfiguration();
<add>
<add> ApolloPathogenCode strain = new ApolloPathogenCode();
<add> strain.setNcbiTaxonId("114727");
<add> // strain.setGisrnCladeName("A/(H3N2) Victoria/361//2011-like");
<add>
<add> vesc.setStrainIdentifier(strain);
<add> vesc.setForVaccinationPreventableOutcome(VaccinationPreventableOutcome.INFECTION);
<add> // vesc.setTreatment(vacc);
<add> // vesc.setTreatment(t);
<add> vesc.setVaccineIdentifier("Influenza A (H1N1) 2009 Monovalent Vaccine");
<add> vesc.setAverageVaccinationEfficacy(0.47);
<add> vesc.setDescription("The vaccination efficacy for the Influenza A (H1N1) 2009 Monovalent Vaccine");
<add>
<add> vacc.getVaccinationEfficacies().add(vesc);
<add>
<add> return vacc;
<add> }
<add>
<add> private ProbabilisticParameterValue getControlStrategyCompilance() {
<add> ProbabilisticParameterValue compliance = new ProbabilisticParameterValue();
<add> compliance.setValue(0.5);
<add> return compliance;
<add>
<add> }
<add>
<add> private ControlStrategyTargetPopulationsAndPrioritization getTargetPopulationsAndPrioritizations() {
<add> ControlStrategyTargetPopulationsAndPrioritization targetPopulationsAndPrioritization = new ControlStrategyTargetPopulationsAndPrioritization();
<add> targetPopulationsAndPrioritization
<add> .setControlStrategyNamedPrioritizationScheme("ACIP");
<add> return targetPopulationsAndPrioritization;
<add> }
<add>
<add> private NumericParameterValue getResponseDelay() {
<add> NumericParameterValue responseDelay = new NumericParameterValue();
<add> responseDelay.setUnitOfMeasure(UnitOfMeasure.DAYS);
<add> responseDelay.setValue(0d);
<add>
<add> return responseDelay;
<add>
<add> }
<add>
<add> private FixedStartTime getFixedStartTime() {
<add> FixedStartTime fixedStartTime = new FixedStartTime();
<add> fixedStartTime.setStartTimeRelativeToScenarioDate(new BigInteger("0"));
<add> fixedStartTime.setStopTimeRelativeToScenarioDate(new BigInteger("90"));
<add> return fixedStartTime;
<add> }
<add>
<add> private IndividualTreatmentControlStrategy getVaccinationControlStrategy() {
<add> IndividualTreatmentControlStrategy vaccinationControlMeasure = new IndividualTreatmentControlStrategy();
<add> vaccinationControlMeasure.setControlStrategyCompliance(getControlStrategyCompilance());
<add> vaccinationControlMeasure.setControlStrategyReactiveEndPointFraction(1.0);
<add> vaccinationControlMeasure.setControlStrategyResponseDelay(getResponseDelay());
<add> vaccinationControlMeasure.setControlStrategyStartTime(getFixedStartTime());
<add> vaccinationControlMeasure.setDescription("An example vaccination control strategy.");
<add> vaccinationControlMeasure.setIndividualTreatment(getVaccination());
<add> vaccinationControlMeasure.setTargetPopulationsAndPrioritizations(getTargetPopulationsAndPrioritizations());
<add>
<add> for (int i = 0; i < 90; i++)
<add> vaccinationControlMeasure.getSupplySchedule().add(new BigInteger("3500"));
<add>
<add> for (int i = 0; i < 90; i++)
<add> vaccinationControlMeasure.getAdministrationCapacity().add(new BigInteger("3500"));
<add>
<add> return vaccinationControlMeasure;
<add> }
<ide>
<ide> public static void main(String args[]) throws java.lang.Exception {
<del> URL wsdlURL = new URL(WSClient.WSDL_LOC);
<add> URL wsdlURL = ApolloServiceV20.WSDL_LOCATION;
<add> if (args.length > 0 && args[0] != null && !"".equals(args[0])) {
<add> File wsdlFile = new File(args[0]);
<add> try {
<add> if (wsdlFile.exists()) {
<add> wsdlURL = wsdlFile.toURI().toURL();
<add> } else {
<add> wsdlURL = new URL(args[0]);
<add> }
<add> } catch (MalformedURLException e) {
<add> e.printStackTrace();
<add> }
<add> }
<add>
<ide> WSClient client = new WSClient(
<ide> wsdlURL);
<ide> String simulationRunId = client.callRunSimulation();
<ide> System.out.println("The simulator returned a runId of "
<ide> + simulationRunId);
<add>
<ide> RunAndSoftwareIdentification runAndSoftwareId = new RunAndSoftwareIdentification();
<ide> runAndSoftwareId.setSoftwareId(client
<del> .getSoftwareIdentificationForFred());
<add> .getSoftwareIdentificationForSimulator());
<ide> runAndSoftwareId.setRunId(simulationRunId);
<add>
<ide> MethodCallStatus status = client
<ide> .checkStatusOfWebServiceCall(runAndSoftwareId);
<add>
<ide> if (status.getStatus() == MethodCallStatusEnum.COMPLETED) {
<ide> client.getResourcesFromVisualizer(simulationRunId,
<ide> client.getSoftwareIdentifiationForTimeSeriesVisualizer());
<ide> }
<ide>
<ide> }
<del>
<ide>
<ide> |
|
Java | bsd-3-clause | error: pathspec 'src/java/fr/paris/lutece/util/method/MethodUtil.java' did not match any file(s) known to git
| a9943d65c9d76eec8d8a03a7390eed07cc83363f | 1 | rzara/lutece-core,lutece-platform/lutece-core,rzara/lutece-core,lutece-platform/lutece-core,lutece-platform/lutece-core,rzara/lutece-core | /*
* Copyright (c) 2002-2012, Mairie de Paris
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright notice
* and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice
* and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* 3. Neither the name of 'Mairie de Paris' nor 'Lutece' nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* License 1.0
*/
package fr.paris.lutece.util.method;
import org.apache.commons.lang.StringUtils;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
/**
*
* MethodUtils
*
*/
public final class MethodUtil
{
private static final String PREFIX_GET = "get";
private static final String PREFIX_SET = "set";
/**
* Instantiates a new method utils.
*/
private MethodUtil( )
{
}
/**
* Sets the attribute.
* <br />
 * <strong>Warning:</strong> This method does not handle setters that:
 * <ul>
 * <li>have no parameter or more than one parameter</li>
 * <li>have an array parameter (i.e. String[] or int[] ...)</li>
* </ul>
*
* @param <A> the generic type of the instance
* @param <B> the generic type of the value to set
* @param instance the instance to set
* @param strAttributeName the attribute name
* @param value the value of the attribute to set
* @throws SecurityException the security exception
* @throws NoSuchMethodException the no such method exception
* @throws IllegalArgumentException the illegal argument exception
* @throws IllegalAccessException the illegal access exception
* @throws InvocationTargetException the invocation target exception
*/
public static <A, B> void set( A instance, String strAttributeName, B value )
throws SecurityException, NoSuchMethodException, IllegalArgumentException, IllegalAccessException, InvocationTargetException
{
if ( StringUtils.isNotBlank( strAttributeName ) && ( instance != null ) && ( value != null ) )
{
Method methodSetter = getSetter( instance, strAttributeName, value.getClass( ) );
if ( methodSetter != null )
{
methodSetter.invoke( instance, new Object[]{ value } );
}
else
{
throw new NoSuchMethodException( );
}
}
else
{
throw new IllegalArgumentException( "One of the parameters is null/blank." );
}
}
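// Illustrative usage (hypothetical bean): for an object exposing setTitle( String ),
// MethodUtil.set( bean, "title", "My title" ) resolves the setter by name and invokes
// bean.setTitle( "My title" ) reflectively.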
/**
* Gets the method.
*
* @param <A> the generic type of the instance
* @param strMethodPrefix the str method prefix
* @param instance the instance
* @param strAttributeName the str attribute name
* @param clazz the clazz
* @return the method
* @throws SecurityException the security exception
* @throws NoSuchMethodException the no such method exception
*/
public static <A> Method getMethod( String strMethodPrefix, A instance, String strAttributeName, Class<?> clazz )
throws SecurityException, NoSuchMethodException
{
String strFirstLetter = strAttributeName.substring( 0, 1 ).toUpperCase( );
String strMethodName = strMethodPrefix + strFirstLetter +
strAttributeName.substring( 1, strAttributeName.length( ) );
try
{
return instance.getClass( ).getMethod( strMethodName, new Class[]{ clazz } );
}
catch ( NoSuchMethodException e )
{
return getPrimitiveMethod( strMethodName, instance, clazz );
}
}
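// Example of the lookup above: prefix "set" and attribute "score" yield the method name
// "setScore"; if no overload taking the wrapper class (e.g. Integer) exists, the primitive
// variant (e.g. setScore( int )) is tried via getPrimitiveMethod.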
/**
* Gets the primitive method.
*
* @param <A> the generic type of the instance
* @param strMethodName the str method name
* @param instance the instance
* @param clazz the clazz
* @return the primitive method
* @throws SecurityException the security exception
* @throws NoSuchMethodException the no such method exception
*/
public static <A> Method getPrimitiveMethod( String strMethodName, A instance, Class<?> clazz )
throws SecurityException, NoSuchMethodException
{
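        // The value arrives boxed (Integer, Long, ...): translate it to its primitive
        // type and look the method up again with the primitive signature.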
if ( clazz.equals( Integer.class ) )
{
return instance.getClass( ).getMethod( strMethodName, new Class[]{ int.class } );
}
else if ( clazz.equals( Long.class ) )
{
return instance.getClass( ).getMethod( strMethodName, new Class[]{ long.class } );
}
else if ( clazz.equals( Double.class ) )
{
return instance.getClass( ).getMethod( strMethodName, new Class[]{ double.class } );
}
else if ( clazz.equals( Short.class ) )
{
return instance.getClass( ).getMethod( strMethodName, new Class[]{ short.class } );
}
else if ( clazz.equals( Byte.class ) )
{
return instance.getClass( ).getMethod( strMethodName, new Class[]{ byte.class } );
}
else if ( clazz.equals( Float.class ) )
{
return instance.getClass( ).getMethod( strMethodName, new Class[]{ float.class } );
}
else if ( clazz.equals( Character.class ) )
{
return instance.getClass( ).getMethod( strMethodName, new Class[]{ char.class } );
}
else if ( clazz.equals( Boolean.class ) )
{
return instance.getClass( ).getMethod( strMethodName, new Class[]{ boolean.class } );
}
throw new NoSuchMethodException( );
}
/**
* Gets the setter.
*
     * @param <A> the generic type of the instance
     * @param instance the instance
* @param strAttributeName the str attribute name
* @param clazz the clazz
* @return the setter
* @throws SecurityException the security exception
* @throws NoSuchMethodException the no such method exception
*/
public static <A> Method getSetter( A instance, String strAttributeName, Class<?> clazz ) throws SecurityException, NoSuchMethodException
{
return getMethod( PREFIX_SET, instance, strAttributeName, clazz );
}
/**
     * Gets the getter.
*
* @param <A> the generic type of the instance
* @param instance the instance
* @param strAttributeName the str attribute name
* @param clazz the clazz
     * @return the getter
* @throws SecurityException the security exception
* @throws NoSuchMethodException the no such method exception
*/
public static <A> Method getGetter( A instance, String strAttributeName, Class<?> clazz ) throws SecurityException, NoSuchMethodException
{
return getMethod( PREFIX_GET, instance, strAttributeName, clazz );
}
}
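The snippet below is a minimal usage sketch of the class above. MethodUtilUsageSketch and its nested Person bean are hypothetical and exist only for illustration; MethodUtil.set( ) resolves the setters by reflection, and the boxed Integer value exercises the getPrimitiveMethod( ) fallback.

import fr.paris.lutece.util.method.MethodUtil;

public class MethodUtilUsageSketch
{
    /** Hypothetical bean used only for this sketch. */
    public static class Person
    {
        private String _strName;
        private int _nAge;

        public void setName( String strName ) { _strName = strName; }
        public String getName( ) { return _strName; }
        public void setAge( int nAge ) { _nAge = nAge; }
        public int getAge( ) { return _nAge; }
    }

    public static void main( String[] args ) throws Exception
    {
        Person person = new Person( );
        // Invokes person.setName( "Alice" ) by reflection.
        MethodUtil.set( person, "name", "Alice" );
        // The boxed Integer value is matched to setAge( int ) through getPrimitiveMethod( ).
        MethodUtil.set( person, "age", 42 );
        System.out.println( person.getName( ) + " is " + person.getAge( ) );
    }
}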
| src/java/fr/paris/lutece/util/method/MethodUtil.java | LUTECE-1497 : Add the MethodUtil that allow to invoke getter and setter dynamically
git-svn-id: 890dd67775b5971c21efd90062c158582082fe1b@42993 bab10101-e421-0410-a517-8ce0973de3ef
| src/java/fr/paris/lutece/util/method/MethodUtil.java | LUTECE-1497 : Add the MethodUtil that allow to invoke getter and setter dynamically | <ide><path>rc/java/fr/paris/lutece/util/method/MethodUtil.java
<add>/*
<add> * Copyright (c) 2002-2012, Mairie de Paris
<add> * All rights reserved.
<add> *
<add> * Redistribution and use in source and binary forms, with or without
<add> * modification, are permitted provided that the following conditions
<add> * are met:
<add> *
<add> * 1. Redistributions of source code must retain the above copyright notice
<add> * and the following disclaimer.
<add> *
<add> * 2. Redistributions in binary form must reproduce the above copyright notice
<add> * and the following disclaimer in the documentation and/or other materials
<add> * provided with the distribution.
<add> *
<add> * 3. Neither the name of 'Mairie de Paris' nor 'Lutece' nor the names of its
<add> * contributors may be used to endorse or promote products derived from
<add> * this software without specific prior written permission.
<add> *
<add> * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
<add> * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
<add> * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
<add> * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
<add> * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
<add> * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
<add> * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
<add> * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
<add> * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
<add> * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
<add> * POSSIBILITY OF SUCH DAMAGE.
<add> *
<add> * License 1.0
<add> */
<add>package fr.paris.lutece.util.method;
<add>
<add>import org.apache.commons.lang.StringUtils;
<add>
<add>import java.lang.reflect.InvocationTargetException;
<add>import java.lang.reflect.Method;
<add>
<add>/**
<add> *
<add> * MethodUtils
<add> *
<add> */
<add>public final class MethodUtil
<add>{
<add> private static final String PREFIX_GET = "get";
<add> private static final String PREFIX_SET = "set";
<add>
<add> /**
<add> * Instantiates a new method utils.
<add> */
<add> private MethodUtil( )
<add> {
<add> }
<add>
<add> /**
<add> * Sets the attribute.
<add> * <br />
<add> * <strong>Warning:</strong> This method does not handle setters that:
<add> * <ul>
<add> * <li>have no parameters or have more than one parameter</li>
<add> * <li>have an array parameter (i.e. String[] or int[] ...)</li>
<add> * </ul>
<add> *
<add> * @param <A> the generic type of the instance
<add> * @param <B> the generic type of the value to set
<add> * @param instance the instance to set
<add> * @param strAttributeName the attribute name
<add> * @param value the value of the attribute to set
<add> * @throws SecurityException the security exception
<add> * @throws NoSuchMethodException the no such method exception
<add> * @throws IllegalArgumentException the illegal argument exception
<add> * @throws IllegalAccessException the illegal access exception
<add> * @throws InvocationTargetException the invocation target exception
<add> */
<add> public static <A, B> void set( A instance, String strAttributeName, B value )
<add> throws SecurityException, NoSuchMethodException, IllegalArgumentException, IllegalAccessException, InvocationTargetException
<add> {
<add> if ( StringUtils.isNotBlank( strAttributeName ) && ( instance != null ) && ( value != null ) )
<add> {
<add> Method methodSetter = getSetter( instance, strAttributeName, value.getClass( ) );
<add> if ( methodSetter != null )
<add> {
<add> methodSetter.invoke( instance, new Object[]{ value } );
<add> }
<add> else
<add> {
<add> throw new NoSuchMethodException( );
<add> }
<add> }
<add> else
<add> {
<add> throw new IllegalArgumentException( "One of the parameters is null/blank." );
<add> }
<add> }
<add>
<add> /**
<add> * Gets the method.
<add> *
<add> * @param <A> the generic type of the instance
<add> * @param strMethodPrefix the str method prefix
<add> * @param instance the instance
<add> * @param strAttributeName the str attribute name
<add> * @param clazz the clazz
<add> * @return the method
<add> * @throws SecurityException the security exception
<add> * @throws NoSuchMethodException the no such method exception
<add> */
<add> public static <A> Method getMethod( String strMethodPrefix, A instance, String strAttributeName, Class<?> clazz )
<add> throws SecurityException, NoSuchMethodException
<add> {
<add> String strFirstLetter = strAttributeName.substring( 0, 1 ).toUpperCase( );
<add>
<add> String strMethodName = strMethodPrefix + strFirstLetter +
<add> strAttributeName.substring( 1, strAttributeName.length( ) );
<add> try
<add> {
<add> return instance.getClass( ).getMethod( strMethodName, new Class[]{ clazz } );
<add> }
<add> catch ( NoSuchMethodException e )
<add> {
<add> return getPrimitiveMethod( strMethodName, instance, clazz );
<add> }
<add> }
<add>
<add> /**
<add> * Gets the primitive method.
<add> *
<add> * @param <A> the generic type of the instance
<add> * @param strMethodName the str method name
<add> * @param instance the instance
<add> * @param clazz the clazz
<add> * @return the primitive method
<add> * @throws SecurityException the security exception
<add> * @throws NoSuchMethodException the no such method exception
<add> */
<add> public static <A> Method getPrimitiveMethod( String strMethodName, A instance, Class<?> clazz )
<add> throws SecurityException, NoSuchMethodException
<add> {
<add> if ( clazz.equals( Integer.class ) )
<add> {
<add> return instance.getClass( ).getMethod( strMethodName, new Class[]{ int.class } );
<add> }
<add> else if ( clazz.equals( Long.class ) )
<add> {
<add> return instance.getClass( ).getMethod( strMethodName, new Class[]{ long.class } );
<add> }
<add> else if ( clazz.equals( Double.class ) )
<add> {
<add> return instance.getClass( ).getMethod( strMethodName, new Class[]{ double.class } );
<add> }
<add> else if ( clazz.equals( Short.class ) )
<add> {
<add> return instance.getClass( ).getMethod( strMethodName, new Class[]{ short.class } );
<add> }
<add> else if ( clazz.equals( Byte.class ) )
<add> {
<add> return instance.getClass( ).getMethod( strMethodName, new Class[]{ byte.class } );
<add> }
<add> else if ( clazz.equals( Float.class ) )
<add> {
<add> return instance.getClass( ).getMethod( strMethodName, new Class[]{ float.class } );
<add> }
<add> else if ( clazz.equals( Character.class ) )
<add> {
<add> return instance.getClass( ).getMethod( strMethodName, new Class[]{ char.class } );
<add> }
<add> else if ( clazz.equals( Boolean.class ) )
<add> {
<add> return instance.getClass( ).getMethod( strMethodName, new Class[]{ boolean.class } );
<add> }
<add> throw new NoSuchMethodException( );
<add> }
<add>
<add> /**
<add> * Gets the setter.
<add> *
<add> * @param instance the instance
<add> * @param strAttributeName the str attribute name
<add> * @param clazz the clazz
<add> * @return the setter
<add> * @throws SecurityException the security exception
<add> * @throws NoSuchMethodException the no such method exception
<add> */
<add> public static <A> Method getSetter( A instance, String strAttributeName, Class<?> clazz ) throws SecurityException, NoSuchMethodException
<add> {
<add> return getMethod( PREFIX_SET, instance, strAttributeName, clazz );
<add> }
<add>
<add> /**
<add> * Gets the getter.
<add> *
<add> * @param <A> the generic type of the instance
<add> * @param instance the instance
<add> * @param strAttributeName the str attribute name
<add> * @param clazz the clazz
<add> * @return the getter
<add> * @throws SecurityException the security exception
<add> * @throws NoSuchMethodException the no such method exception
<add> */
<add> public static <A> Method getGetter( A instance, String strAttributeName, Class<?> clazz ) throws SecurityException, NoSuchMethodException
<add> {
<add> return getMethod( PREFIX_GET, instance, strAttributeName, clazz );
<add> }
<add>} |
|
JavaScript | isc | 111f6d979dcdfa000328105ba369107a49eb6392 | 0 | pka/mapbox-gl-style-spec,mapbox/mapbox-gl-style-lint,pka/mapbox-gl-style-spec | 'use strict';
var jsonlint = require('jsonlint-lines');
var reference = require('./reference');
var parseCSSColor = require('csscolorparser').parseCSSColor;
module.exports = {};
module.exports.value = value;
['v2','v3'].forEach(function(version) {
var ref = reference(version);
// Create validator for version
module.exports[version] = validator(ref);
// Create validators for each container ('style', 'bucket', etc.)
for (var container in ref) {
if (typeof ref[container] !== 'object') continue;
if (ref[container].type && typeof ref[container].type !== 'object') continue;
module.exports[version][container] = validateContainer(ref, container);
}
});
// Generates a validation function for a container object.
function validateContainer(ref, container) {
return function (property, val, line) {
var errors = [];
var spec = ref[container][property];
if (!spec) {
errors.push({
message: container + ' property unrecognized: ' + property,
line: line
});
} else {
value(property, val, {}, ref, spec, errors);
}
return errors;
};
}
function validator(ref) {
var validate = function(str) {
var style, errors = [];
try {
style = jsonlint.parse(str.toString());
} catch(e) {
var match = e.message.match(/line (\d+)/),
lineNumber = 0;
if (match) lineNumber = parseInt(match[1], 10);
return [{
line: lineNumber - 1,
message: e.message,
error: e
}];
}
style = jsonlint.parse(str.toString());
value('', style, style.constants||{}, ref, ref.$root, errors);
return errors;
};
return validate;
}
// Main recursive validation function. Tracks:
//
// - key: string representing location of validation in style tree. Used only
// for more informative error reporting. Example: `styles.default.road`
// - val: current value from style being evaluated. May be anything from a
// high level object that needs to be descended into deeper or a simple
// scalar value.
// - constants: object of constants for the style to resolve constant values.
// - ref: full reference object. Used if any point in validation refers to
// a type defined at the root-level of the reference object.
// - spec: current spec being evaluated. Tracks val.
// - errors: array of errors passed by reference.
//
// Returns true if the `val` passed (and any children recursed into) pass
// the validation defined by `spec`. Returns false if validation fails.
// Validation errors will be pushed onto the errors array.
function value(key, val, constants, ref, spec, errors) {
var pass = true;
// Resolve constants.
if (typeof val === 'string' && constants[val] !== undefined) val = constants[val];
// Spec specifies a non-native type (bucket, style, etc.)
if (spec.type && ref[spec.type]) {
return value(key, val, constants, ref, ref[spec.type], errors);
// Spec specifies a type, but val must be an array of those (layers only atm).
} else if (spec.type === 'array' && spec.value) {
if (Array.isArray(val)) {
if (spec.length && val.length !== spec.length) {
errors.push({
message: key + ': array length ' + spec.length + ' expected, length ' + val.length + ' found',
line: val.__line__
});
return false;
}
for (var i = 0; i < val.length; i++) {
var valspec = ref[spec.value]||spec.value;
if (typeof valspec === 'string') {
pass = validateNative(key + '[' + i + ']', val[i], valspec, errors) && pass;
} else {
pass = value(key + '[' + i + ']', val[i], constants, ref, valspec, errors) && pass;
}
}
return pass;
} else {
errors.push({
message: key + ': array expected, ' + typeof val + ' found',
line: val.__line__
});
return false;
}
// Spec specifies an array of specs val may match.
} else if (Array.isArray(spec)) {
var sub = [];
var valid = spec.some(function(s) {
var spec = typeof s === 'string' ? (ref[s] || s) : s;
if (typeof spec === 'string') {
return validateNative(key, val, spec, sub);
} else {
return value(key, val, constants, ref, spec, sub);
}
});
if (!valid) sub.forEach(function(err) { errors.push(err); });
return valid;
// Val is a function.
} else if (spec.function && typeof val === 'object' && val.fn) {
return value(key, val, constants, ref, ref.function, errors);
// Val must be one of enumerated values.
} else if (spec.type === 'enum') {
return validateEnum(key, val, spec.values, errors);
// Val must match a type.
} else if (spec.type && typeof spec.type === 'string') {
return validateNative(key, val, spec.type, errors);
// No type defined: spec is a container. Val must be an object
// and must have keys matching the container object definition.
} else {
for (var k in val) {
var childkey = (key ? key + '.' : key) + k;
var def = spec[k] || spec['*'] || undefined;
if (!def) {
errors.push({
message: spec.__name__ + ' property unrecognized: ' + k,
line: val.__line__
});
pass = false;
continue;
}
pass = value(childkey, val[k], constants, ref, def, errors) && pass;
}
for (var l in spec) {
if (spec[l].required && spec[l]['default'] === undefined && val[l] === undefined) {
errors.push({
message: spec.__name__ + ' property ' + l + ' required',
line: val.__line__
});
pass = false;
}
}
return pass;
}
}
function validateNative(key, val, spec, errors) {
if (spec === '*') return true;
var type = Array.isArray(val) ? 'array' : typeof val;
if (spec === 'color') {
if (type !== 'string') {
errors.push({
message: key + ': ' + spec + ' expected, ' + type + ' found',
line: val.__line__
});
return false;
} else if (parseCSSColor(val) === null) {
errors.push({
message: key + ': ' + spec + ' expected, ' + val + ' found',
line: val.__line__
});
return false;
} else {
return true;
}
}
if (type !== spec) {
errors.push({
message: key + ': ' + spec + ' expected, ' + (typeof val) + ' found',
line: val.__line__
});
return false;
} else {
return true;
}
}
function validateEnum(key, val, spec, errors) {
if (spec.indexOf(val) === -1) {
errors.push({
message: key + ': expected one of [' + spec.join(', ') + '], ' + val + ' found',
line: val.__line__
});
return false;
} else {
return true;
}
}
| lib/validate.js | 'use strict';
var jsonlint = require('jsonlint-lines');
var reference = require('./reference');
var parseCSSColor = require('csscolorparser').parseCSSColor;
module.exports = {};
module.exports.value = value;
['v2','v3'].forEach(function(version) {
var ref = reference(version);
// Create validator for version
module.exports[version] = validator(ref);
// Create validators for each container ('style', 'bucket', etc.)
for (var container in ref) {
if (typeof ref[container] !== 'object') continue;
if (ref[container].type && typeof ref[container].type !== 'object') continue;
module.exports[version][container] = validateContainer(ref, container);
}
});
// Generates a validation function for a container object.
function validateContainer(ref, container) {
return function (property, val, line) {
var errors = [];
var spec = ref[container][property];
if (!spec) {
errors.push({
message: container + ' property unrecognized: ' + property,
line: line
});
} else {
value(property, val, {}, ref, spec, errors);
}
return errors;
};
}
function validator(ref) {
var validate = function(str) {
var style, errors = [];
try {
style = jsonlint.parse(str.toString());
} catch(e) {
var match = e.message.match(/line (\d+)/),
lineNumber = 0;
if (match) lineNumber = parseInt(match[1], 10);
return [{
line: lineNumber - 1,
message: e.message,
error: e
}];
}
style = jsonlint.parse(str.toString());
value('', style, style.constants||{}, ref, ref.$root, errors);
return errors;
};
return validate;
}
// Main recursive validation function. Tracks:
//
// - key: string representing location of validation in style tree. Used only
// for more informative error reporting. Example: `styles.default.road`
// - val: current value from style being evaluated. May be anything from a
// high level object that needs to be descended into deeper or a simple
// scalar value.
// - constants: object of constants for the style to resolve constant values.
// - ref: full reference object. Used if any point in validation refers to
// a type defined at the root-level of the reference object.
// - spec: current spec being evaluated. Tracks val.
// - errors: array of errors passed by reference.
//
// Returns true if the `val` passed (and any children recursed into) pass
// the validation defined by `spec`. Returns false if validation fails.
// Validation errors will be pushed onto the errors array.
function value(key, val, constants, ref, spec, errors) {
var pass = true;
// Resolve constants.
if (typeof val === 'string' && constants[val] !== undefined) val = constants[val];
// Spec specifies a non-native type (bucket, style, etc.)
if (spec.type && ref[spec.type]) {
return value(key, val, constants, ref, ref[spec.type], errors);
// Spec specifies a type, but val must be an array of those (layers only atm).
} else if (spec.type === 'array' && spec.value) {
if (Array.isArray(val)) {
if (spec.length && val.length !== spec.length) {
errors.push({
message: key + ': array length ' + spec.length + ' expected, length ' + val.length + ' found',
line: val.__line__
});
return false;
}
for (var i = 0; i < val.length; i++) {
var valspec = ref[spec.value]||spec.value;
if (typeof valspec === 'string') {
pass = validateNative(key + '[' + i + ']', val[i], valspec, errors) && pass;
} else {
pass = value(key + '[' + i + ']', val[i], constants, ref, valspec, errors) && pass;
}
}
return pass;
} else {
errors.push({
message: key + ': array expected, ' + typeof val + ' found',
line: val.__line__
});
return false;
}
// Spec specifies an array of specs val may match.
} else if (Array.isArray(spec)) {
var sub = [];
var valid = spec.some(function(s) {
var spec = typeof s === 'string' ? (ref[s] || s) : s;
if (typeof spec === 'string') {
return validateNative(key, val, spec, sub);
} else {
return value(key, val, constants, ref, spec, sub);
}
});
if (!valid) sub.forEach(function(err) { errors.push(err); });
return valid;
// Val is a function.
} else if (spec.function && typeof val === 'object' && val.fn) {
return value(key, val, constants, ref, ref.function, errors);
// Val must be one of enumerated values.
} else if (spec.type === 'enum') {
return validateEnum(key, val, spec.values, errors);
// Val must match a type.
} else if (spec.type && typeof spec.type === 'string') {
return validateNative(key, val, spec.type, errors);
// No type defined: spec is a container. Val must be an object
// and must have keys matching the container object definition.
} else {
for (var k in val) {
var childkey = (key ? key + '.' : key) + k;
var def = spec[k] || spec['*'] || undefined;
if (!def) {
errors.push({
message: spec.__name__ + ' property unrecognized: ' + k,
line: val.__line__
});
pass = false;
continue;
}
pass = value(childkey, val[k], constants, ref, def, errors) && pass;
}
for (var l in spec) {
if (spec[l].required && spec[l]['default'] === undefined && val[l] === undefined) {
errors.push({
message: spec.__name__ + ' property ' + l + ' required',
line: val.__line__
});
pass = false;
}
}
return pass;
}
}
function validateNative(key, val, spec, errors) {
if (spec === '*') return true;
var type = Array.isArray(val) ? 'array' : typeof val;
if (spec === 'color') {
if (type === 'array') {
if (val.length > 4 || val.length < 3) {
errors.push({
message: key + ': ' + spec + ' expected, ' + val + ' found',
line: val.__line__
});
return false;
}
return true;
} else if (type !== 'string') {
errors.push({
message: key + ': ' + spec + ' expected, ' + type + ' found',
line: val.__line__
});
return false;
} else if (parseCSSColor(val) === null) {
errors.push({
message: key + ': ' + spec + ' expected, ' + val + ' found',
line: val.__line__
});
return false;
} else {
return true;
}
}
if (type !== spec) {
errors.push({
message: key + ': ' + spec + ' expected, ' + (typeof val) + ' found',
line: val.__line__
});
return false;
} else {
return true;
}
}
function validateEnum(key, val, spec, errors) {
if (spec.indexOf(val) === -1) {
errors.push({
message: key + ': expected one of [' + spec.join(', ') + '], ' + val + ' found',
line: val.__line__
});
return false;
} else {
return true;
}
}
| Consider color arrays invalid. Refs mapbox/mapbox-gl-style-spec#43
| lib/validate.js | Consider color arrays invalid. Refs mapbox/mapbox-gl-style-spec#43 | <ide><path>ib/validate.js
<ide> var type = Array.isArray(val) ? 'array' : typeof val;
<ide>
<ide> if (spec === 'color') {
<del> if (type === 'array') {
<del> if (val.length > 4 || val.length < 3) {
<del> errors.push({
<del> message: key + ': ' + spec + ' expected, ' + val + ' found',
<del> line: val.__line__
<del> });
<del> return false;
<del> }
<del> return true;
<del> } else if (type !== 'string') {
<add> if (type !== 'string') {
<ide> errors.push({
<ide> message: key + ': ' + spec + ' expected, ' + type + ' found',
<ide> line: val.__line__ |
|
Java | mit | 2a62c89298f78026720a4c0cb44a25b26bd19f80 | 0 | mcai/Archimulator,mcai/Archimulator,mcai/Archimulator,mcai/Archimulator | /**
* ****************************************************************************
* Copyright (c) 2010-2016 by Min Cai ([email protected]).
* <p>
* This file is part of the Archimulator multicore architectural simulator.
* <p>
* Archimulator is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* <p>
* Archimulator is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* <p>
* You should have received a copy of the GNU General Public License
* along with Archimulator. If not, see <http://www.gnu.org/licenses/>.
* ****************************************************************************
*/
package archimulator.uncore.net;
import archimulator.common.Experiment;
import archimulator.common.Simulation;
import archimulator.common.SimulationEvent;
import archimulator.common.report.ReportNode;
import archimulator.common.report.Reportable;
import archimulator.uncore.AbstractMemoryHierarchy;
import archimulator.uncore.MemoryDevice;
import archimulator.uncore.net.noc.Network;
import archimulator.uncore.net.noc.routers.FlitState;
import archimulator.uncore.net.noc.routing.OddEvenTurnBasedRoutingAlgorithm;
import archimulator.uncore.net.noc.selection.aco.ACONode;
import archimulator.util.event.BlockingEventDispatcher;
import archimulator.util.event.CycleAccurateEventQueue;
/**
* NoC memory hierarchy.
*
* @author Min Cai
*/
public class NoCMemoryHierarchy extends AbstractMemoryHierarchy implements Reportable {
private NoCNet net;
/**
     * Create a NoC memory hierarchy.
*
* @param experiment the experiment
* @param simulation the simulation
* @param blockingEventDispatcher the blocking event dispatcher
* @param cycleAccurateEventQueue the cycle accurate event queue
*/
public NoCMemoryHierarchy(Experiment experiment, Simulation simulation, BlockingEventDispatcher<SimulationEvent> blockingEventDispatcher, CycleAccurateEventQueue cycleAccurateEventQueue) {
super(experiment, simulation, blockingEventDispatcher, cycleAccurateEventQueue);
this.net = new NoCNet(this);
}
@Override
public Net getNet(MemoryDevice from, MemoryDevice to) {
return net;
}
@Override
public void dumpStats(ReportNode reportNode) {
reportNode.getChildren().add(new ReportNode(reportNode, getName()) {{
Network<ACONode, OddEvenTurnBasedRoutingAlgorithm> network = net.getNetwork();
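            // Report end-to-end NoC statistics: packet counts, throughput, delays and hop
            // counts for all packets and for payload packets, plus per-flit-state delays.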
getChildren().add(
new ReportNode(
this,
"noc/numPacketsReceived",
String.format("%d", network.getNumPacketsReceived())
)
);
getChildren().add(
new ReportNode(
this,
"noc/numPacketsTransmitted",
String.format("%d", network.getNumPacketsTransmitted())
)
);
getChildren().add(
new ReportNode(
this,
"noc/throughput",
String.format("%s", network.throughput())
)
);
getChildren().add(
new ReportNode(
this,
"noc/averagePacketDelay",
String.format("%s", network.averagePacketDelay())
)
);
getChildren().add(
new ReportNode(
this,
"noc/averagePacketHops",
String.format("%s", network.averagePacketHops())
)
);
getChildren().add(
new ReportNode(
this,
"noc/maxPacketDelay",
String.format("%d", network.getMaxPacketDelay())
)
);
getChildren().add(
new ReportNode(
this,
"noc/maxPacketHops",
String.format("%d", network.getMaxPacketHops())
)
);
getChildren().add(
new ReportNode(
this,
"noc/numPayloadPacketsReceived",
String.format("%d", network.getNumPayloadPacketsReceived())
)
);
getChildren().add(
new ReportNode(
this,
"noc/numPayloadPacketsTransmitted",
String.format("%d", network.getNumPayloadPacketsTransmitted())
)
);
getChildren().add(
new ReportNode(
this,
"noc/payloadThroughput",
String.format("%s", network.payloadThroughput())
)
);
getChildren().add(
new ReportNode(
this,
"noc/averagePayloadPacketDelay",
String.format("%s", network.averagePayloadPacketDelay())
)
);
getChildren().add(
new ReportNode(
this,
"noc/averagePayloadPacketHops",
String.format("%s", network.averagePayloadPacketHops())
)
);
getChildren().add(
new ReportNode(
this,
"noc/maxPayloadPacketDelay",
String.format("%d", network.getMaxPayloadPacketDelay())
)
);
getChildren().add(
new ReportNode(
this,
"noc/maxPayloadPacketHops",
String.format("%d", network.getMaxPayloadPacketHops())
)
);
for(FlitState state : FlitState.values()) {
getChildren().add(
new ReportNode(
this,
String.format("noc/averageFlitPerStateDelay::%s", state),
String.format("%s", network.averageFlitPerStateDelay(state))
)
);
}
for(FlitState state : FlitState.values()) {
getChildren().add(
new ReportNode(
this,
String.format("noc/maxFlitPerStateDelay::%s", state),
network.getMaxFlitPerStateDelay().containsKey(state) ? String.format("%d", network.getMaxFlitPerStateDelay().get(state)) : String.format("%s", 0.0)
)
);
}
}});
}
}
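One possible cleanup, shown here only as a sketch rather than as part of the class above: the repeated getChildren().add(new ReportNode(...)) calls in dumpStats(..) could be routed through a small helper method. The helper assumes the ReportNode(parent, name, value) constructor and the getChildren() accessor exactly as they are used above; addStat is a hypothetical name.

    // Hypothetical helper that could be added to the class above; it collapses the
    // repeated getChildren().add(new ReportNode(...)) pattern used in dumpStats(..).
    private static void addStat(ReportNode parent, String key, Object value) {
        // String.format("%s", ...) renders longs and doubles the same way the existing
        // "%d"/"%s" format strings do.
        parent.getChildren().add(new ReportNode(parent, key, String.format("%s", value)));
    }

    // Example call sites inside the anonymous ReportNode block:
    //   addStat(this, "noc/numPacketsReceived", network.getNumPacketsReceived());
    //   addStat(this, "noc/throughput", network.throughput());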
| src/main/java/archimulator/uncore/net/NoCMemoryHierarchy.java | /**
* ****************************************************************************
* Copyright (c) 2010-2016 by Min Cai ([email protected]).
* <p>
* This file is part of the Archimulator multicore architectural simulator.
* <p>
* Archimulator is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* <p>
* Archimulator is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* <p>
* You should have received a copy of the GNU General Public License
* along with Archimulator. If not, see <http://www.gnu.org/licenses/>.
* ****************************************************************************
*/
package archimulator.uncore.net;
import archimulator.common.Experiment;
import archimulator.common.Simulation;
import archimulator.common.SimulationEvent;
import archimulator.common.report.ReportNode;
import archimulator.common.report.Reportable;
import archimulator.uncore.AbstractMemoryHierarchy;
import archimulator.uncore.MemoryDevice;
import archimulator.uncore.net.noc.Network;
import archimulator.uncore.net.noc.routers.FlitState;
import archimulator.uncore.net.noc.routing.OddEvenTurnBasedRoutingAlgorithm;
import archimulator.uncore.net.noc.selection.aco.ACONode;
import archimulator.util.event.BlockingEventDispatcher;
import archimulator.util.event.CycleAccurateEventQueue;
/**
* NoC memory hierarchy.
*
* @author Min Cai
*/
public class NoCMemoryHierarchy extends AbstractMemoryHierarchy implements Reportable {
private NoCNet net;
/**
* Create a basic memory hierarchy.
*
* @param experiment the experiment
* @param simulation the simulation
* @param blockingEventDispatcher the blocking event dispatcher
* @param cycleAccurateEventQueue the cycle accurate event queue
*/
public NoCMemoryHierarchy(Experiment experiment, Simulation simulation, BlockingEventDispatcher<SimulationEvent> blockingEventDispatcher, CycleAccurateEventQueue cycleAccurateEventQueue) {
super(experiment, simulation, blockingEventDispatcher, cycleAccurateEventQueue);
this.net = new NoCNet(this);
}
@Override
public Net getNet(MemoryDevice from, MemoryDevice to) {
return net;
}
@Override
public void dumpStats(ReportNode reportNode) {
reportNode.getChildren().add(new ReportNode(reportNode, getName()) {{
Network<ACONode, OddEvenTurnBasedRoutingAlgorithm> network = net.getNetwork();
getChildren().add(new ReportNode(this, "numPacketsReceived", network.getNumPacketsReceived() + ""));
getChildren().add(new ReportNode(this, "numPacketsTransmitted", network.getNumPacketsTransmitted() + ""));
getChildren().add(new ReportNode(this, "throughput", network.throughput() + ""));
getChildren().add(new ReportNode(this, "averagePacketDelay", network.averagePacketDelay() + ""));
getChildren().add(new ReportNode(this, "averagePacketHops", network.averagePacketHops() + ""));
getChildren().add(new ReportNode(this, "maxPacketDelay", network.getMaxPacketDelay() + ""));
getChildren().add(new ReportNode(this, "maxPacketHops", network.getMaxPacketHops() + ""));
getChildren().add(new ReportNode(this, "numPayloadPacketsReceived", network.getNumPayloadPacketsReceived() + ""));
getChildren().add(new ReportNode(this, "numPayloadPacketsTransmitted", network.getNumPayloadPacketsTransmitted() + ""));
getChildren().add(new ReportNode(this, "payloadThroughput", network.payloadThroughput() + ""));
getChildren().add(new ReportNode(this, "averagePayloadPacketDelay", network.averagePayloadPacketDelay() + ""));
getChildren().add(new ReportNode(this, "averagePayloadPacketHops", network.averagePayloadPacketHops() + ""));
getChildren().add(new ReportNode(this, "maxPayloadPacketDelay", network.getMaxPayloadPacketDelay() + ""));
getChildren().add(new ReportNode(this, "maxPayloadPacketHops", network.getMaxPayloadPacketHops() + ""));
for(FlitState state : FlitState.values()) {
getChildren().add(new ReportNode(this, String.format("averageFlitPerStateDelay::%s", state),
network.averageFlitPerStateDelay(state) + ""));
}
for(FlitState state : FlitState.values()) {
getChildren().add(new ReportNode(this, String.format("maxFlitPerStateDelay::%s", state),
network.getMaxFlitPerStateDelay().containsKey(state) ? network.getMaxFlitPerStateDelay().get(state) + "" : 0.0 + ""));
}
}});
}
}
| Update dumpStats(..) code in NoCMemoryHierarchy.
| src/main/java/archimulator/uncore/net/NoCMemoryHierarchy.java | Update dumpStats(..) code in NoCMemoryHierarchy. | <ide><path>rc/main/java/archimulator/uncore/net/NoCMemoryHierarchy.java
<ide> reportNode.getChildren().add(new ReportNode(reportNode, getName()) {{
<ide> Network<ACONode, OddEvenTurnBasedRoutingAlgorithm> network = net.getNetwork();
<ide>
<del> getChildren().add(new ReportNode(this, "numPacketsReceived", network.getNumPacketsReceived() + ""));
<del> getChildren().add(new ReportNode(this, "numPacketsTransmitted", network.getNumPacketsTransmitted() + ""));
<del> getChildren().add(new ReportNode(this, "throughput", network.throughput() + ""));
<del> getChildren().add(new ReportNode(this, "averagePacketDelay", network.averagePacketDelay() + ""));
<del> getChildren().add(new ReportNode(this, "averagePacketHops", network.averagePacketHops() + ""));
<del> getChildren().add(new ReportNode(this, "maxPacketDelay", network.getMaxPacketDelay() + ""));
<del> getChildren().add(new ReportNode(this, "maxPacketHops", network.getMaxPacketHops() + ""));
<del>
<del> getChildren().add(new ReportNode(this, "numPayloadPacketsReceived", network.getNumPayloadPacketsReceived() + ""));
<del> getChildren().add(new ReportNode(this, "numPayloadPacketsTransmitted", network.getNumPayloadPacketsTransmitted() + ""));
<del> getChildren().add(new ReportNode(this, "payloadThroughput", network.payloadThroughput() + ""));
<del> getChildren().add(new ReportNode(this, "averagePayloadPacketDelay", network.averagePayloadPacketDelay() + ""));
<del> getChildren().add(new ReportNode(this, "averagePayloadPacketHops", network.averagePayloadPacketHops() + ""));
<del> getChildren().add(new ReportNode(this, "maxPayloadPacketDelay", network.getMaxPayloadPacketDelay() + ""));
<del> getChildren().add(new ReportNode(this, "maxPayloadPacketHops", network.getMaxPayloadPacketHops() + ""));
<add> getChildren().add(
<add> new ReportNode(
<add> this,
<add> "noc/numPacketsReceived",
<add> String.format("%d", network.getNumPacketsReceived())
<add> )
<add> );
<add>
<add> getChildren().add(
<add> new ReportNode(
<add> this,
<add> "noc/numPacketsTransmitted",
<add> String.format("%d", network.getNumPacketsTransmitted())
<add> )
<add> );
<add>
<add> getChildren().add(
<add> new ReportNode(
<add> this,
<add> "noc/throughput",
<add> String.format("%s", network.throughput())
<add> )
<add> );
<add>
<add> getChildren().add(
<add> new ReportNode(
<add> this,
<add> "noc/averagePacketDelay",
<add> String.format("%s", network.averagePacketDelay())
<add> )
<add> );
<add>
<add> getChildren().add(
<add> new ReportNode(
<add> this,
<add> "noc/averagePacketHops",
<add> String.format("%s", network.averagePacketHops())
<add> )
<add> );
<add>
<add> getChildren().add(
<add> new ReportNode(
<add> this,
<add> "noc/maxPacketDelay",
<add> String.format("%d", network.getMaxPacketDelay())
<add> )
<add> );
<add>
<add> getChildren().add(
<add> new ReportNode(
<add> this,
<add> "noc/maxPacketHops",
<add> String.format("%d", network.getMaxPacketHops())
<add> )
<add> );
<add>
<add> getChildren().add(
<add> new ReportNode(
<add> this,
<add> "noc/numPayloadPacketsReceived",
<add> String.format("%d", network.getNumPayloadPacketsReceived())
<add> )
<add> );
<add>
<add> getChildren().add(
<add> new ReportNode(
<add> this,
<add> "noc/numPayloadPacketsTransmitted",
<add> String.format("%d", network.getNumPayloadPacketsTransmitted())
<add> )
<add> );
<add>
<add> getChildren().add(
<add> new ReportNode(
<add> this,
<add> "noc/payloadThroughput",
<add> String.format("%s", network.payloadThroughput())
<add> )
<add> );
<add>
<add> getChildren().add(
<add> new ReportNode(
<add> this,
<add> "noc/averagePayloadPacketDelay",
<add> String.format("%s", network.averagePayloadPacketDelay())
<add> )
<add> );
<add>
<add> getChildren().add(
<add> new ReportNode(
<add> this,
<add> "noc/averagePayloadPacketHops",
<add> String.format("%s", network.averagePayloadPacketHops())
<add> )
<add> );
<add>
<add> getChildren().add(
<add> new ReportNode(
<add> this,
<add> "noc/maxPayloadPacketDelay",
<add> String.format("%d", network.getMaxPayloadPacketDelay())
<add> )
<add> );
<add>
<add> getChildren().add(
<add> new ReportNode(
<add> this,
<add> "noc/maxPayloadPacketHops",
<add> String.format("%d", network.getMaxPayloadPacketHops())
<add> )
<add> );
<ide>
<ide> for(FlitState state : FlitState.values()) {
<del> getChildren().add(new ReportNode(this, String.format("averageFlitPerStateDelay::%s", state),
<del> network.averageFlitPerStateDelay(state) + ""));
<add> getChildren().add(
<add> new ReportNode(
<add> this,
<add> String.format("noc/averageFlitPerStateDelay::%s", state),
<add> String.format("%s", network.averageFlitPerStateDelay(state))
<add> )
<add> );
<ide> }
<ide>
<ide> for(FlitState state : FlitState.values()) {
<del> getChildren().add(new ReportNode(this, String.format("maxFlitPerStateDelay::%s", state),
<del> network.getMaxFlitPerStateDelay().containsKey(state) ? network.getMaxFlitPerStateDelay().get(state) + "" : 0.0 + ""));
<add> getChildren().add(
<add> new ReportNode(
<add> this,
<add> String.format("noc/maxFlitPerStateDelay::%s", state),
<add> network.getMaxFlitPerStateDelay().containsKey(state) ? String.format("%d", network.getMaxFlitPerStateDelay().get(state)) : String.format("%s", 0.0)
<add> )
<add> );
<ide> }
<ide> }});
<ide> } |
|
Java | epl-1.0 | f0593632a855db2acf5858d2df217425aad2ee0c | 0 | yannick-mayeur/january,IanMayo/january,SmithRWORNL/january,IanMayo/january,SmithRWORNL/january,yannick-mayeur/january,yannick-mayeur/january,SmithRWORNL/january,yannick-mayeur/january,IanMayo/january,IanMayo/january | /*******************************************************************************
* Copyright (c) 2012, 2014- UT-Battelle, LLC.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Initial API and implementation and/or initial documentation - Jay Jay Billings,
* Jordan H. Deyton, Dasha Gorin, Alexander J. McCaskey, Taylor Patterson,
* Claire Saunders, Matthew Wang, Anna Wojtowicz
*******************************************************************************/
package org.eclipse.ice.datastructures.form;
import org.eclipse.ice.datastructures.ICEObject.IUpdateableListener;
import org.eclipse.ice.datastructures.ICEObject.ListComponent;
import java.util.ArrayList;
import org.eclipse.ice.datastructures.componentVisitor.IComponentVisitor;
import org.eclipse.ice.datastructures.resource.ICEResource;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
/**
* The ResourceComponent is a specialization of ListComponent that is used to
* manage a set of ICEResources. It is used, for example, to collect Resources
* for output data on a Form. ICEResources can be very easily added to
* ResourceComponents by calling the addResource() operation and the whole list
* of managed ICEResources can be retrieved with getResources().
*
*
*
* @author Jay Jay Billings, Anna Wojtowicz
*/
@XmlRootElement(name = "ResourceComponent")
public class ResourceComponent extends ListComponent<ICEResource> {
/**
* The set of IUpdateableListeners observing the ResourceComponent.
*/
@XmlTransient
protected ArrayList<IUpdateableListener> listeners;
/**
* The constructor.
*/
public ResourceComponent() {
// Setup the listeners list
listeners = new ArrayList<IUpdateableListener>();
return;
}
/**
* This operation adds an ICEResource to the component.
*
* @param resource
* The new resource to add.
*/
public void addResource(ICEResource resource) {
// Add the resource if it is good
if (resource != null) {
this.add(resource);
}
return;
}
/**
* This operation removes an ICEResource from the component.
*
* @param resource
* The resource to remove.
*/
public void removeResource(ICEResource resource) {
this.remove(resource);
return;
}
/**
* This operation gets all of the ICEResources from the component.
*
* @return The list of ICEResources contained by the ResourceComponent.
*/
public ArrayList<ICEResource> getResources() {
return new ArrayList<ICEResource>(this);
}
/**
* An operation that clears all the ICEResources stored on the
* ResourceComponent. If there are no items in the list, this operation does
* nothing.
*/
public void clearResources() {
this.clear();
return;
}
/**
* This operation provides a deep copy of the ResourceComponent.
*
* @return The clone of this ResourceComponent.
*/
public Object clone() {
// Create a new instance of ResourceComponent and copy contents
ResourceComponent outputComponent = new ResourceComponent();
outputComponent.copy(this);
return outputComponent;
}
/**
* This operation is used to check equality between the ResourceComponent
* and another ResourceComponent. It returns true if the Components are
* equal and false if they are not.
*
* @param otherResourceComponent
* The other ResourceComponent whose information should be
* compared to this ResourceComponent.
* @return True if the ResourceComponents are equal, false otherwise.
*/
public boolean equals(Object otherResourceComponent) {
// Check if they are the same reference in memory
if (this == otherResourceComponent) {
return true;
}
// Check that the object is not null, and that it is an instance of
// ResourceComponent
boolean retVal = false;
if (otherResourceComponent != null
&& otherResourceComponent instanceof ResourceComponent) {
// Call the super equals to check the list contents
retVal = super.equals(otherResourceComponent);
}
return retVal;
}
/**
* Accepts a visitor to reveal the type of the ResourceComponent.
*/
@Override
public void accept(IComponentVisitor visitor) {
// Reveal our type to the visitor
visitor.visit(this);
return;
}
// /**
// * This protected operation notifies the listeners of the ResourceComponent
// * that its state has changed.
// */
// protected void notifyListeners() {
//
// // Only process the update if there are listeners
// if (listeners != null && !listeners.isEmpty()) {
// // Create a thread on which to notify the listeners.
// Thread notifierThread = new Thread() {
// @Override
// public void run() {
// // Loop over all listeners and update them
// for (int i = 0; i < listeners.size(); i++) {
// listeners.get(i).update(ResourceComponent.this);
// }
// return;
// }
// };
//
// // Launch the thread and do the notifications
// notifierThread.start();
// }
//
// return;
// }
} | src/org.eclipse.ice.datastructures/src/org/eclipse/ice/datastructures/form/ResourceComponent.java | /*******************************************************************************
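A minimal usage sketch of the ResourceComponent operations defined above (addResource, removeResource, getResources, clearResources). ResourceComponentUsageSketch is hypothetical, and the ICEResource instance is taken as a parameter because constructing one is outside the scope of this class.

import org.eclipse.ice.datastructures.form.ResourceComponent;
import org.eclipse.ice.datastructures.resource.ICEResource;

public class ResourceComponentUsageSketch {
    public static void demo(ICEResource someResource) {
        ResourceComponent output = new ResourceComponent();
        // Register a resource and read the managed list back.
        output.addResource(someResource);
        for (ICEResource resource : output.getResources()) {
            System.out.println("Managed resource: " + resource);
        }
        // Drop a single resource, then everything that is left.
        output.removeResource(someResource);
        output.clearResources();
    }
}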
* Copyright (c) 2012, 2014- UT-Battelle, LLC.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Initial API and implementation and/or initial documentation - Jay Jay Billings,
* Jordan H. Deyton, Dasha Gorin, Alexander J. McCaskey, Taylor Patterson,
* Claire Saunders, Matthew Wang, Anna Wojtowicz
*******************************************************************************/
package org.eclipse.ice.datastructures.form;
import org.eclipse.ice.datastructures.ICEObject.IUpdateableListener;
import org.eclipse.ice.datastructures.ICEObject.ListComponent;
import java.util.ArrayList;
import org.eclipse.ice.datastructures.componentVisitor.IComponentVisitor;
import org.eclipse.ice.datastructures.resource.ICEResource;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
/**
* The ResourceComponent is a specialization of ListComponent that is used to
* manage a set of ICEResources. It is used, for example, to collect Resources
* for output data on a Form. ICEResources can be very easily added to
* ResourceComponents by calling the addResource() operation and the whole list
* of managed ICEResources can be retrieved with getResources().
*
*
*
* @author Jay Jay Billings, Anna Wojtowicz
*/
@XmlRootElement(name = "ResourceComponent")
public class ResourceComponent extends ListComponent<ICEResource> {
/**
* The set of IUpdateableListeners observing the ResourceComponent.
*/
@XmlTransient
protected ArrayList<IUpdateableListener> listeners;
/**
* The constructor.
*/
public ResourceComponent() {
// Setup the listeners list
listeners = new ArrayList<IUpdateableListener>();
return;
}
/**
* This operation adds an ICEResource to the component.
*
* @param resource
* The new resource to add.
*/
public void addResource(ICEResource resource) {
// Add the resource if it is good
if (resource != null) {
this.add(resource);
}
return;
}
/**
* This operation gets all of the ICEResources from the component.
*
* @return The list of ICEResources contained by the ResourceComponent.
*/
public ArrayList<ICEResource> getResources() {
return new ArrayList<ICEResource>(this);
}
/**
* An operation that clears all the ICEResources stored on the
* ResourceComponent. If there are no items in the list, this operation does
* nothing.
*/
public void clearResources() {
this.clear();
return;
}
/**
* This operation provides a deep copy of the ResourceComponent.
*
* @return The clone of this ResourceComponent.
*/
public Object clone() {
// Create a new instance of ResourceComponent and copy contents
ResourceComponent outputComponent = new ResourceComponent();
outputComponent.copy(this);
return outputComponent;
}
/**
* This operation is used to check equality between the ResourceComponent
* and another ResourceComponent. It returns true if the Components are
* equal and false if they are not.
*
* @param otherResourceComponent
* The other ResourceComponent whose information should be
* compared to this ResourceComponent.
* @return True if the ResourceComponents are equal, false otherwise.
*/
public boolean equals(Object otherResourceComponent) {
// Check if they are the same reference in memory
if (this == otherResourceComponent) {
return true;
}
// Check that the object is not null, and that it is an instance of
// ResourceComponent
boolean retVal = false;
if (otherResourceComponent != null
&& otherResourceComponent instanceof ResourceComponent) {
// Call the super equals to check the list contents
retVal = super.equals(otherResourceComponent);
}
return retVal;
}
/**
* Accepts a visitor to reveal the type of the ResourceComponent.
*/
@Override
public void accept(IComponentVisitor visitor) {
// Reveal our type to the visitor
visitor.visit(this);
return;
}
// /**
// * This protected operation notifies the listeners of the ResourceComponent
// * that its state has changed.
// */
// protected void notifyListeners() {
//
// // Only process the update if there are listeners
// if (listeners != null && !listeners.isEmpty()) {
// // Create a thread on which to notify the listeners.
// Thread notifierThread = new Thread() {
// @Override
// public void run() {
// // Loop over all listeners and update them
// for (int i = 0; i < listeners.size(); i++) {
// listeners.get(i).update(ResourceComponent.this);
// }
// return;
// }
// };
//
// // Launch the thread and do the notifications
// notifierThread.start();
// }
//
// return;
// }
} | Fixes bugs 469299 and 469302, the file viewer delete button not working
and the CSV Plot Editor not being openable.
Signed-off-by: r8s <[email protected]> | src/org.eclipse.ice.datastructures/src/org/eclipse/ice/datastructures/form/ResourceComponent.java | Fixes bugs 469299 and 469302, the file viewer delete button not working and the CSV Plot Editor not being openable. | <ide><path>rc/org.eclipse.ice.datastructures/src/org/eclipse/ice/datastructures/form/ResourceComponent.java
<ide> this.add(resource);
<ide> }
<ide>
<add> return;
<add> }
<add>
<add> /**
<add> * This operation removes an ICEResource from the component.
<add> *
<add> * @param resource
<add> * The resource to remove.
<add> */
<add> public void removeResource(ICEResource resource) {
<add> this.remove(resource);
<ide> return;
<ide> }
<ide> |
|
Java | apache-2.0 | 5fa2d9b22b0ae23b1c8b22a54c51fac76633045e | 0 | sekikn/ambari,radicalbit/ambari,radicalbit/ambari,sekikn/ambari,sekikn/ambari,radicalbit/ambari,sekikn/ambari,radicalbit/ambari,radicalbit/ambari,sekikn/ambari,radicalbit/ambari,sekikn/ambari,radicalbit/ambari,radicalbit/ambari,radicalbit/ambari,sekikn/ambari,sekikn/ambari,radicalbit/ambari,sekikn/ambari,radicalbit/ambari | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.controller.internal;
import com.google.inject.Binder;
import com.google.inject.Guice;
import com.google.inject.Module;
import com.google.inject.util.Modules;
import org.apache.ambari.server.controller.AmbariManagementController;
import org.apache.ambari.server.controller.ConfigGroupRequest;
import org.apache.ambari.server.controller.ConfigGroupResponse;
import org.apache.ambari.server.controller.RequestStatusResponse;
import org.apache.ambari.server.controller.spi.NoSuchResourceException;
import org.apache.ambari.server.controller.spi.Predicate;
import org.apache.ambari.server.controller.spi.Request;
import org.apache.ambari.server.controller.spi.Resource;
import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
import org.apache.ambari.server.controller.spi.ResourceProvider;
import org.apache.ambari.server.controller.spi.SystemException;
import org.apache.ambari.server.controller.utilities.PredicateBuilder;
import org.apache.ambari.server.controller.utilities.PropertyHelper;
import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
import org.apache.ambari.server.orm.dao.HostDAO;
import org.apache.ambari.server.orm.entities.HostEntity;
import org.apache.ambari.server.security.TestAuthenticationFactory;
import org.apache.ambari.server.security.authorization.AuthorizationException;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
import org.apache.ambari.server.state.ConfigHelper;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.configgroup.ConfigGroup;
import org.apache.ambari.server.state.configgroup.ConfigGroupFactory;
import org.easymock.Capture;
import org.easymock.IAnswer;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static junit.framework.Assert.*;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.capture;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.createNiceMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.newCapture;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.easymock.EasyMock.createStrictMock;
public class ConfigGroupResourceProviderTest {
private HostDAO hostDAO = null;
@Before
public void setup() throws Exception {
// Clear authenticated user so that authorization checks will pass
SecurityContextHolder.getContext().setAuthentication(null);
hostDAO = createStrictMock(HostDAO.class);
// Create injector after all mocks have been initialized
Guice.createInjector(Modules.override(
new InMemoryDefaultTestModule()).with(new MockModule()));
}
private ConfigGroupResourceProvider getConfigGroupResourceProvider
(AmbariManagementController managementController) {
Resource.Type type = Resource.Type.ConfigGroup;
return (ConfigGroupResourceProvider) AbstractControllerResourceProvider.getResourceProvider(
type,
PropertyHelper.getPropertyIds(type),
PropertyHelper.getKeyPropertyIds(type),
managementController);
}
private class MockModule implements Module {
@Override
public void configure(Binder binder) {
binder.bind(HostDAO.class).toInstance(hostDAO);
}
}
@Test
public void testCreateConfigGroupAsAmbariAdministrator() throws Exception {
testCreateConfigGroup(TestAuthenticationFactory.createAdministrator());
}
@Test
public void testCreateConfigGroupAsClusterAdministrator() throws Exception {
testCreateConfigGroup(TestAuthenticationFactory.createClusterAdministrator());
}
@Test
public void testCreateConfigGroupAsClusterOperator() throws Exception {
testCreateConfigGroup(TestAuthenticationFactory.createClusterOperator());
}
@Test
public void testCreateConfigGroupAsServiceAdministrator() throws Exception {
testCreateConfigGroup(TestAuthenticationFactory.createServiceAdministrator());
}
@Test(expected = AuthorizationException.class)
public void testCreateConfigGroupAsServiceOperator() throws Exception {
testCreateConfigGroup(TestAuthenticationFactory.createServiceOperator());
}
@Test(expected = AuthorizationException.class)
public void testCreateConfigGroupAsClusterUser() throws Exception {
testCreateConfigGroup(TestAuthenticationFactory.createClusterUser());
}
private void testCreateConfigGroup(Authentication authentication) throws Exception {
AmbariManagementController managementController = createMock(AmbariManagementController.class);
RequestStatusResponse response = createNiceMock(RequestStatusResponse.class);
Clusters clusters = createNiceMock(Clusters.class);
Cluster cluster = createNiceMock(Cluster.class);
Host h1 = createNiceMock(Host.class);
Host h2 = createNiceMock(Host.class);
HostEntity hostEntity1 = createMock(HostEntity.class);
HostEntity hostEntity2 = createMock(HostEntity.class);
ConfigGroupFactory configGroupFactory = createNiceMock(ConfigGroupFactory.class);
ConfigGroup configGroup = createNiceMock(ConfigGroup.class);
expect(managementController.getClusters()).andReturn(clusters).anyTimes();
expect(clusters.getCluster("Cluster100")).andReturn(cluster).anyTimes();
expect(clusters.getHost("h1")).andReturn(h1);
expect(clusters.getHost("h2")).andReturn(h2);
expect(managementController.getConfigGroupFactory()).andReturn(configGroupFactory);
expect(managementController.getAuthName()).andReturn("admin").anyTimes();
expect(hostDAO.findByName("h1")).andReturn(hostEntity1).atLeastOnce();
expect(hostDAO.findByName("h2")).andReturn(hostEntity2).atLeastOnce();
expect(hostEntity1.getHostId()).andReturn(1L).atLeastOnce();
expect(hostEntity2.getHostId()).andReturn(2L).atLeastOnce();
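    // Capture the arguments handed to ConfigGroupFactory.createNew() so the resolved
    // configs and host ids can be asserted on after createResources() runs.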
Capture<Cluster> clusterCapture = newCapture();
Capture<String> captureName = newCapture();
Capture<String> captureDesc = newCapture();
Capture<String> captureTag = newCapture();
Capture<Map<String, Config>> captureConfigs = newCapture();
Capture<Map<Long, Host>> captureHosts = newCapture();
expect(configGroupFactory.createNew(capture(clusterCapture),
capture(captureName), capture(captureTag), capture(captureDesc),
capture(captureConfigs), capture(captureHosts))).andReturn(configGroup);
replay(managementController, clusters, cluster, configGroupFactory,
configGroup, response, hostDAO, hostEntity1, hostEntity2);
    ResourceProvider provider = getConfigGroupResourceProvider(managementController);
Set<Map<String, Object>> propertySet = new LinkedHashSet<Map<String, Object>>();
Map<String, Object> properties = new LinkedHashMap<String, Object>();
Set<Map<String, Object>> hostSet = new HashSet<Map<String, Object>>();
Map<String, Object> host1 = new HashMap<String, Object>();
host1.put(ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID, "h1");
hostSet.add(host1);
Map<String, Object> host2 = new HashMap<String, Object>();
host2.put(ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID, "h2");
hostSet.add(host2);
Set<Map<String, Object>> configSet = new HashSet<Map<String, Object>>();
Map<String, String> configMap = new HashMap<String, String>();
Map<String, Object> configs = new HashMap<String, Object>();
configs.put("type", "core-site");
configs.put("tag", "version100");
configMap.put("key1", "value1");
configs.put("properties", configMap);
configSet.add(configs);
properties.put(ConfigGroupResourceProvider
.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID, "Cluster100");
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_NAME_PROPERTY_ID,
"test-1");
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID,
"tag-1");
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_HOSTS_PROPERTY_ID,
hostSet);
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_CONFIGS_PROPERTY_ID,
configSet);
propertySet.add(properties);
Request request = PropertyHelper.getCreateRequest(propertySet, null);
SecurityContextHolder.getContext().setAuthentication(authentication);
provider.createResources(request);
verify(managementController, clusters, cluster, configGroupFactory,
configGroup, response, hostDAO, hostEntity1, hostEntity2);
assertEquals("version100", captureConfigs.getValue().get("core-site")
.getTag());
assertTrue(captureHosts.getValue().containsKey(1L));
assertTrue(captureHosts.getValue().containsKey(2L));
}
@Test
public void testDuplicateNameConfigGroupAsAmbariAdministrator() throws Exception {
testDuplicateNameConfigGroup(TestAuthenticationFactory.createAdministrator());
}
@Test
public void testDuplicateNameConfigGroupAsClusterAdministrator() throws Exception {
testDuplicateNameConfigGroup(TestAuthenticationFactory.createClusterAdministrator());
}
@Test
public void testDuplicateNameConfigGroupAsClusterOperator() throws Exception {
testDuplicateNameConfigGroup(TestAuthenticationFactory.createClusterOperator());
}
@Test
public void testDuplicateNameConfigGroupAsServiceAdministrator() throws Exception {
testDuplicateNameConfigGroup(TestAuthenticationFactory.createServiceAdministrator());
}
@Test(expected = AuthorizationException.class)
public void testDuplicateNameConfigGroupAsServiceOperator() throws Exception {
testDuplicateNameConfigGroup(TestAuthenticationFactory.createServiceOperator());
}
@Test(expected = AuthorizationException.class)
public void testDuplicateNameConfigGroupAsClusterUser() throws Exception {
testDuplicateNameConfigGroup(TestAuthenticationFactory.createClusterUser());
}
private void testDuplicateNameConfigGroup(Authentication authentication) throws Exception {
AmbariManagementController managementController = createMock(AmbariManagementController.class);
RequestStatusResponse response = createNiceMock(RequestStatusResponse.class);
Clusters clusters = createNiceMock(Clusters.class);
Cluster cluster = createNiceMock(Cluster.class);
ConfigGroupFactory configGroupFactory = createNiceMock(ConfigGroupFactory.class);
ConfigGroup configGroup = createNiceMock(ConfigGroup.class);
Map<Long, ConfigGroup> configGroupMap = new HashMap<Long, ConfigGroup>();
configGroupMap.put(1L, configGroup);
expect(managementController.getClusters()).andReturn(clusters).anyTimes();
expect(clusters.getCluster("Cluster100")).andReturn(cluster).anyTimes();
    expect(managementController.getConfigGroupFactory()).andReturn(configGroupFactory).anyTimes();
expect(managementController.getAuthName()).andReturn("admin").anyTimes();
expect(cluster.getConfigGroups()).andReturn(configGroupMap);
expect(configGroupFactory.createNew((Cluster) anyObject(), (String) anyObject(),
(String) anyObject(), (String) anyObject(), (HashMap) anyObject(),
(HashMap) anyObject())).andReturn(configGroup).anyTimes();
expect(configGroup.getClusterName()).andReturn("Cluster100").anyTimes();
expect(configGroup.getName()).andReturn("test-1").anyTimes();
expect(configGroup.getTag()).andReturn("tag-1").anyTimes();
replay(managementController, clusters, cluster, configGroupFactory,
configGroup, response);
    ResourceProvider provider = getConfigGroupResourceProvider(managementController);
Map<String, Object> properties = new LinkedHashMap<String, Object>();
Set<Map<String, Object>> propertySet = new LinkedHashSet<Map<String, Object>>();
properties.put(ConfigGroupResourceProvider
.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID, "Cluster100");
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_NAME_PROPERTY_ID,
"test-1");
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID,
"tag-1");
propertySet.add(properties);
Request request = PropertyHelper.getCreateRequest(propertySet, null);
SecurityContextHolder.getContext().setAuthentication(authentication);
Exception exception = null;
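    // Let AuthorizationException propagate for the negative permission tests;
    // capture any other exception so the duplicate-name failure can be asserted below.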
try {
provider.createResources(request);
} catch (AuthorizationException e) {
throw e;
} catch (Exception e) {
exception = e;
}
verify(managementController, clusters, cluster, configGroupFactory,
configGroup, response);
assertNotNull(exception);
assertTrue(exception instanceof ResourceAlreadyExistsException);
}
@Test
public void testUpdateConfigGroupWithWrongConfigType() throws Exception {
AmbariManagementController managementController = createMock(AmbariManagementController.class);
RequestStatusResponse response = createNiceMock(RequestStatusResponse.class);
ConfigHelper configHelper = createNiceMock(ConfigHelper.class);
Clusters clusters = createNiceMock(Clusters.class);
Cluster cluster = createNiceMock(Cluster.class);
Host h1 = createNiceMock(Host.class);
Host h2 = createNiceMock(Host.class);
HostEntity hostEntity1 = createMock(HostEntity.class);
HostEntity hostEntity2 = createMock(HostEntity.class);
final ConfigGroup configGroup = createNiceMock(ConfigGroup.class);
    ConfigGroupResponse configGroupResponse = createNiceMock(ConfigGroupResponse.class);
expect(cluster.isConfigTypeExists("core-site")).andReturn(false).anyTimes();
expect(managementController.getClusters()).andReturn(clusters).anyTimes();
expect(managementController.getAuthName()).andReturn("admin").anyTimes();
expect(clusters.getCluster("Cluster100")).andReturn(cluster).anyTimes();
expect(clusters.getHost("h1")).andReturn(h1);
expect(clusters.getHost("h2")).andReturn(h2);
expect(hostDAO.findByName("h1")).andReturn(hostEntity1).anyTimes();
expect(hostDAO.findById(1L)).andReturn(hostEntity1).anyTimes();
expect(hostDAO.findByName("h2")).andReturn(hostEntity2).anyTimes();
expect(hostDAO.findById(2L)).andReturn(hostEntity2).anyTimes();
expect(hostEntity1.getHostId()).andReturn(1L).atLeastOnce();
expect(hostEntity2.getHostId()).andReturn(2L).atLeastOnce();
expect(h1.getHostId()).andReturn(1L).anyTimes();
expect(h2.getHostId()).andReturn(2L).anyTimes();
expect(configGroup.getName()).andReturn("test-1").anyTimes();
expect(configGroup.getId()).andReturn(25L).anyTimes();
expect(configGroup.getTag()).andReturn("tag-1").anyTimes();
expect(configGroup.convertToResponse()).andReturn(configGroupResponse).anyTimes();
expect(configGroupResponse.getClusterName()).andReturn("Cluster100").anyTimes();
expect(configGroupResponse.getId()).andReturn(25L).anyTimes();
expect(cluster.getConfigGroups()).andStubAnswer(new IAnswer<Map<Long, ConfigGroup>>() {
@Override
public Map<Long, ConfigGroup> answer() throws Throwable {
Map<Long, ConfigGroup> configGroupMap = new HashMap<Long, ConfigGroup>();
configGroupMap.put(configGroup.getId(), configGroup);
return configGroupMap;
}
});
replay(managementController, clusters, cluster,
configGroup, response, configGroupResponse, configHelper, hostDAO, hostEntity1, hostEntity2, h1, h2);
    ResourceProvider provider = getConfigGroupResourceProvider(managementController);
Map<String, Object> properties = new LinkedHashMap<String, Object>();
Set<Map<String, Object>> hostSet = new HashSet<Map<String, Object>>();
Map<String, Object> host1 = new HashMap<String, Object>();
host1.put(ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID, "h1");
hostSet.add(host1);
Map<String, Object> host2 = new HashMap<String, Object>();
host2.put(ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID, "h2");
hostSet.add(host2);
Set<Map<String, Object>> configSet = new HashSet<Map<String, Object>>();
Map<String, String> configMap = new HashMap<String, String>();
Map<String, Object> configs = new HashMap<String, Object>();
configs.put("type", "core-site");
configs.put("tag", "version100");
configMap.put("key1", "value1");
configs.put("properties", configMap);
configSet.add(configs);
properties.put(ConfigGroupResourceProvider
.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID, "Cluster100");
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_NAME_PROPERTY_ID,
"test-1");
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID,
"tag-1");
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_HOSTS_PROPERTY_ID,
hostSet);
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_CONFIGS_PROPERTY_ID,
configSet);
Map<String, String> mapRequestProps = new HashMap<String, String>();
mapRequestProps.put("context", "Called from a test");
Request request = PropertyHelper.getUpdateRequest(properties, mapRequestProps);
    Predicate predicate = new PredicateBuilder()
        .property(ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100")
        .and()
        .property(ConfigGroupResourceProvider.CONFIGGROUP_ID_PROPERTY_ID).equals(25L)
        .toPredicate();
SecurityContextHolder.getContext().setAuthentication(TestAuthenticationFactory.createAdministrator());
SystemException systemException = null;
try {
provider.updateResources(request, predicate);
} catch (SystemException e) {
systemException = e;
}
assertNotNull(systemException);
verify(managementController, clusters, cluster,
configGroup, response, configGroupResponse, configHelper, hostDAO, hostEntity1, hostEntity2, h1, h2);
}
@Test
public void testUpdateConfigGroupAsAmbariAdministrator() throws Exception {
testUpdateConfigGroup(TestAuthenticationFactory.createAdministrator());
}
@Test
public void testUpdateConfigGroupAsClusterAdministrator() throws Exception {
testUpdateConfigGroup(TestAuthenticationFactory.createClusterAdministrator());
}
@Test
public void testUpdateConfigGroupAsClusterOperator() throws Exception {
testUpdateConfigGroup(TestAuthenticationFactory.createClusterOperator());
}
@Test
public void testUpdateConfigGroupAsServiceAdministrator() throws Exception {
testUpdateConfigGroup(TestAuthenticationFactory.createServiceAdministrator());
}
@Test(expected = AuthorizationException.class)
public void testUpdateConfigGroupAsServiceOperator() throws Exception {
testUpdateConfigGroup(TestAuthenticationFactory.createServiceOperator());
}
@Test(expected = AuthorizationException.class)
public void testUpdateConfigGroupAsClusterUser() throws Exception {
testUpdateConfigGroup(TestAuthenticationFactory.createClusterUser());
}
private void testUpdateConfigGroup(Authentication authentication) throws Exception {
AmbariManagementController managementController = createMock(AmbariManagementController.class);
RequestStatusResponse response = createNiceMock(RequestStatusResponse.class);
ConfigHelper configHelper = createNiceMock(ConfigHelper.class);
Clusters clusters = createNiceMock(Clusters.class);
Cluster cluster = createNiceMock(Cluster.class);
Host h1 = createNiceMock(Host.class);
Host h2 = createNiceMock(Host.class);
HostEntity hostEntity1 = createMock(HostEntity.class);
HostEntity hostEntity2 = createMock(HostEntity.class);
final ConfigGroup configGroup = createNiceMock(ConfigGroup.class);
    ConfigGroupResponse configGroupResponse = createNiceMock(ConfigGroupResponse.class);
expect(cluster.isConfigTypeExists("core-site")).andReturn(true).anyTimes();
expect(managementController.getClusters()).andReturn(clusters).anyTimes();
expect(managementController.getAuthName()).andReturn("admin").anyTimes();
expect(clusters.getCluster("Cluster100")).andReturn(cluster).anyTimes();
expect(clusters.getHost("h1")).andReturn(h1);
expect(clusters.getHost("h2")).andReturn(h2);
expect(hostDAO.findByName("h1")).andReturn(hostEntity1).anyTimes();
expect(hostDAO.findById(1L)).andReturn(hostEntity1).anyTimes();
expect(hostDAO.findByName("h2")).andReturn(hostEntity2).anyTimes();
expect(hostDAO.findById(2L)).andReturn(hostEntity2).anyTimes();
expect(hostEntity1.getHostId()).andReturn(1L).atLeastOnce();
expect(hostEntity2.getHostId()).andReturn(2L).atLeastOnce();
expect(h1.getHostId()).andReturn(1L).anyTimes();
expect(h2.getHostId()).andReturn(2L).anyTimes();
expect(configGroup.getName()).andReturn("test-1").anyTimes();
expect(configGroup.getId()).andReturn(25L).anyTimes();
expect(configGroup.getTag()).andReturn("tag-1").anyTimes();
expect(configGroup.convertToResponse()).andReturn(configGroupResponse).anyTimes();
expect(configGroupResponse.getClusterName()).andReturn("Cluster100").anyTimes();
expect(configGroupResponse.getId()).andReturn(25L).anyTimes();
expect(cluster.getConfigGroups()).andStubAnswer(new IAnswer<Map<Long, ConfigGroup>>() {
@Override
public Map<Long, ConfigGroup> answer() throws Throwable {
Map<Long, ConfigGroup> configGroupMap = new HashMap<Long, ConfigGroup>();
configGroupMap.put(configGroup.getId(), configGroup);
return configGroupMap;
}
});
replay(managementController, clusters, cluster,
configGroup, response, configGroupResponse, configHelper, hostDAO, hostEntity1, hostEntity2, h1, h2);
    ResourceProvider provider = getConfigGroupResourceProvider(managementController);
Map<String, Object> properties = new LinkedHashMap<String, Object>();
Set<Map<String, Object>> hostSet = new HashSet<Map<String, Object>>();
Map<String, Object> host1 = new HashMap<String, Object>();
host1.put(ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID, "h1");
hostSet.add(host1);
Map<String, Object> host2 = new HashMap<String, Object>();
host2.put(ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID, "h2");
hostSet.add(host2);
Set<Map<String, Object>> configSet = new HashSet<Map<String, Object>>();
Map<String, String> configMap = new HashMap<String, String>();
Map<String, Object> configs = new HashMap<String, Object>();
configs.put("type", "core-site");
configs.put("tag", "version100");
configMap.put("key1", "value1");
configs.put("properties", configMap);
configSet.add(configs);
properties.put(ConfigGroupResourceProvider
.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID, "Cluster100");
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_NAME_PROPERTY_ID,
"test-1");
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID,
"tag-1");
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_HOSTS_PROPERTY_ID,
hostSet);
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_CONFIGS_PROPERTY_ID,
configSet);
Map<String, String> mapRequestProps = new HashMap<String, String>();
mapRequestProps.put("context", "Called from a test");
Request request = PropertyHelper.getUpdateRequest(properties, mapRequestProps);
    Predicate predicate = new PredicateBuilder()
        .property(ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100")
        .and()
        .property(ConfigGroupResourceProvider.CONFIGGROUP_ID_PROPERTY_ID).equals(25L)
        .toPredicate();
SecurityContextHolder.getContext().setAuthentication(authentication);
provider.updateResources(request, predicate);
verify(managementController, clusters, cluster,
configGroup, response, configGroupResponse, configHelper, hostDAO, hostEntity1, hostEntity2, h1, h2);
}
@Test
public void testGetConfigGroupAsAmbariAdministrator() throws Exception {
testGetConfigGroup(TestAuthenticationFactory.createAdministrator());
}
@Test
public void testGetConfigGroupAsClusterAdministrator() throws Exception {
testGetConfigGroup(TestAuthenticationFactory.createClusterAdministrator());
}
@Test
public void testGetConfigGroupAsClusterOperator() throws Exception {
testGetConfigGroup(TestAuthenticationFactory.createClusterOperator());
}
@Test
public void testGetConfigGroupAsServiceAdministrator() throws Exception {
testGetConfigGroup(TestAuthenticationFactory.createServiceAdministrator());
}
@Test
public void testGetConfigGroupAsServiceOperator() throws Exception {
testGetConfigGroup(TestAuthenticationFactory.createServiceOperator());
}
@Test
public void testGetConfigGroupAsClusterUser() throws Exception {
testGetConfigGroup(TestAuthenticationFactory.createClusterUser());
}
@SuppressWarnings("unchecked")
private void testGetConfigGroup(Authentication authentication) throws Exception {
AmbariManagementController managementController = createMock(AmbariManagementController.class);
Clusters clusters = createNiceMock(Clusters.class);
Cluster cluster = createNiceMock(Cluster.class);
Host h1 = createNiceMock(Host.class);
final Long host1Id = 1L;
List<Long> hostIds = new ArrayList<Long>() {{
add(host1Id);
}};
List<String> hostNames = new ArrayList<String>() {{
add("h1");
}};
HostEntity hostEntity1 = createMock(HostEntity.class);
expect(hostDAO.getHostNamesByHostIds(hostIds)).andReturn(hostNames).atLeastOnce();
expect(hostDAO.findByName("h1")).andReturn(hostEntity1).anyTimes();
expect(hostEntity1.getHostId()).andReturn(host1Id).anyTimes();
ConfigGroup configGroup1 = createNiceMock(ConfigGroup.class);
ConfigGroup configGroup2 = createNiceMock(ConfigGroup.class);
ConfigGroup configGroup3 = createNiceMock(ConfigGroup.class);
ConfigGroup configGroup4 = createNiceMock(ConfigGroup.class);
ConfigGroupResponse response1 = createNiceMock(ConfigGroupResponse.class);
ConfigGroupResponse response2 = createNiceMock(ConfigGroupResponse.class);
ConfigGroupResponse response3 = createNiceMock(ConfigGroupResponse.class);
ConfigGroupResponse response4 = createNiceMock(ConfigGroupResponse.class);
Map<Long, ConfigGroup> configGroupMap = new HashMap<Long, ConfigGroup>();
configGroupMap.put(1L, configGroup1);
configGroupMap.put(2L, configGroup2);
configGroupMap.put(3L, configGroup3);
configGroupMap.put(4L, configGroup4);
Map<Long, ConfigGroup> configGroupByHostname = new HashMap<Long, ConfigGroup>();
configGroupByHostname.put(4L, configGroup4);
expect(configGroup1.convertToResponse()).andReturn(response1).anyTimes();
expect(configGroup2.convertToResponse()).andReturn(response2).anyTimes();
expect(configGroup3.convertToResponse()).andReturn(response3).anyTimes();
expect(configGroup4.convertToResponse()).andReturn(response4).anyTimes();
expect(managementController.getClusters()).andReturn(clusters).anyTimes();
expect(clusters.getCluster("Cluster100")).andReturn(cluster).anyTimes();
expect(cluster.getConfigGroups()).andReturn(configGroupMap).anyTimes();
expect(cluster.getClusterName()).andReturn("Cluster100").anyTimes();
expect(configGroup1.getName()).andReturn("g1").anyTimes();
expect(configGroup2.getName()).andReturn("g2").anyTimes();
expect(configGroup3.getName()).andReturn("g3").anyTimes();
expect(configGroup4.getName()).andReturn("g4").anyTimes();
expect(configGroup1.getTag()).andReturn("t1").anyTimes();
expect(configGroup2.getTag()).andReturn("t2").anyTimes();
expect(configGroup3.getTag()).andReturn("t3").anyTimes();
expect(configGroup4.getTag()).andReturn("t4").anyTimes();
Map<Long, Host> hostMap = new HashMap<Long, Host>();
hostMap.put(host1Id, h1);
expect(configGroup4.getHosts()).andReturn(hostMap).anyTimes();
expect(response1.getClusterName()).andReturn("Cluster100").anyTimes();
expect(response2.getClusterName()).andReturn("Cluster100").anyTimes();
expect(response3.getClusterName()).andReturn("Cluster100").anyTimes();
expect(response4.getClusterName()).andReturn("Cluster100").anyTimes();
expect(response1.getId()).andReturn(1L).anyTimes();
expect(response2.getId()).andReturn(2L).anyTimes();
expect(response3.getId()).andReturn(3L).anyTimes();
expect(response4.getId()).andReturn(4L).anyTimes();
expect(response2.getGroupName()).andReturn("g2").anyTimes();
expect(response3.getTag()).andReturn("t3").anyTimes();
expect(cluster.getConfigGroupsByHostname("h1")).andReturn(configGroupByHostname).anyTimes();
Set<Map<String, Object>> hostObj = new HashSet<Map<String, Object>>();
Map<String, Object> hostnames = new HashMap<String, Object>();
hostnames.put("host_name", "h1");
hostObj.add(hostnames);
expect(response4.getHosts()).andReturn(hostObj).anyTimes();
replay(managementController, clusters, cluster, hostDAO, hostEntity1,
configGroup1, configGroup2, configGroup3, configGroup4, response1, response2, response3, response4);
SecurityContextHolder.getContext().setAuthentication(authentication);
ResourceProvider resourceProvider = getConfigGroupResourceProvider(managementController);
Set<String> propertyIds = new HashSet<String>();
propertyIds.add(ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID);
propertyIds.add(ConfigGroupResourceProvider.CONFIGGROUP_ID_PROPERTY_ID);
// Read all
    Predicate predicate = new PredicateBuilder()
        .property(ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100")
        .toPredicate();
Request request = PropertyHelper.getReadRequest(propertyIds);
Set<Resource> resources = resourceProvider.getResources(request, predicate);
assertEquals(4, resources.size());
// Read by id
    predicate = new PredicateBuilder()
        .property(ConfigGroupResourceProvider.CONFIGGROUP_ID_PROPERTY_ID).equals(1L)
        .and()
        .property(ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100")
        .toPredicate();
    resources = resourceProvider.getResources(request, predicate);
    assertEquals(1, resources.size());
    assertEquals(1L, resources.iterator().next()
        .getPropertyValue(ConfigGroupResourceProvider.CONFIGGROUP_ID_PROPERTY_ID));
// Read by Name
    predicate = new PredicateBuilder()
        .property(ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100")
        .and()
        .property(ConfigGroupResourceProvider.CONFIGGROUP_NAME_PROPERTY_ID).equals("g2")
        .toPredicate();
    resources = resourceProvider.getResources(request, predicate);
    assertEquals(1, resources.size());
    assertEquals("g2", resources.iterator().next()
        .getPropertyValue(ConfigGroupResourceProvider.CONFIGGROUP_NAME_PROPERTY_ID));
// Read by tag
    predicate = new PredicateBuilder()
        .property(ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100")
        .and()
        .property(ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID).equals("t3")
        .toPredicate();
    resources = resourceProvider.getResources(request, predicate);
    assertEquals(1, resources.size());
    assertEquals("t3", resources.iterator().next()
        .getPropertyValue(ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID));
// Read by hostname (hosts=h1)
    predicate = new PredicateBuilder()
        .property(ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100")
        .and()
        .property(ConfigGroupResourceProvider.CONFIGGROUP_HOSTS_PROPERTY_ID).equals("h1")
        .toPredicate();
    resources = resourceProvider.getResources(request, predicate);
    assertEquals(1, resources.size());
    Set<Map<String, Object>> hostSet = (Set<Map<String, Object>>) resources.iterator().next()
        .getPropertyValue(ConfigGroupResourceProvider.CONFIGGROUP_HOSTS_PROPERTY_ID);
    assertEquals("h1", hostSet.iterator().next().get(ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID));
// Read by hostname (hosts/host_name=h1)
    predicate = new PredicateBuilder()
        .property(ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100")
        .and()
        .property(ConfigGroupResourceProvider.CONFIGGROUP_HOSTS_HOSTNAME_PROPERTY_ID).equals("h1")
        .toPredicate();
    resources = resourceProvider.getResources(request, predicate);
    assertEquals(1, resources.size());
    hostSet = (Set<Map<String, Object>>) resources.iterator().next()
        .getPropertyValue(ConfigGroupResourceProvider.CONFIGGROUP_HOSTS_PROPERTY_ID);
    assertEquals("h1", hostSet.iterator().next().get(ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID));
// Read by tag and hostname (hosts=h1) - Positive
    predicate = new PredicateBuilder()
        .property(ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100")
        .and()
        .property(ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID).equals("t4")
        .and()
        .property(ConfigGroupResourceProvider.CONFIGGROUP_HOSTS_PROPERTY_ID).equals(host1Id)
        .toPredicate();
    resources = resourceProvider.getResources(request, predicate);
    assertEquals(1, resources.size());
    hostSet = (Set<Map<String, Object>>) resources.iterator().next()
        .getPropertyValue(ConfigGroupResourceProvider.CONFIGGROUP_HOSTS_PROPERTY_ID);
    assertEquals("h1", hostSet.iterator().next().get(ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID));
// Read by tag and hostname (hosts/host_name=h1) - Positive
    predicate = new PredicateBuilder()
        .property(ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100")
        .and()
        .property(ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID).equals("t4")
        .and()
        .property(ConfigGroupResourceProvider.CONFIGGROUP_HOSTS_HOSTNAME_PROPERTY_ID).equals("h1")
        .toPredicate();
    resources = resourceProvider.getResources(request, predicate);
    assertEquals(1, resources.size());
    hostSet = (Set<Map<String, Object>>) resources.iterator().next()
        .getPropertyValue(ConfigGroupResourceProvider.CONFIGGROUP_HOSTS_PROPERTY_ID);
    assertEquals("h1", hostSet.iterator().next().get(ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID));
    // Read by a non-existent id - expect NoSuchResourceException
    predicate = new PredicateBuilder()
        .property(ConfigGroupResourceProvider.CONFIGGROUP_ID_PROPERTY_ID).equals(11L)
        .and()
        .property(ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100")
        .toPredicate();
NoSuchResourceException resourceException = null;
try {
resourceProvider.getResources(request, predicate);
} catch (NoSuchResourceException ce) {
resourceException = ce;
}
Assert.assertNotNull(resourceException);
verify(managementController, clusters, cluster, hostDAO, hostEntity1,
configGroup1, configGroup2, configGroup3, configGroup4, response1, response2, response3, response4);
}
@Test
public void testDeleteConfigGroupAsAmbariAdministrator() throws Exception {
testDeleteConfigGroup(TestAuthenticationFactory.createAdministrator());
}
@Test
public void testDeleteConfigGroupAsClusterAdministrator() throws Exception {
testDeleteConfigGroup(TestAuthenticationFactory.createClusterAdministrator());
}
@Test
public void testDeleteConfigGroupAsClusterOperator() throws Exception {
testDeleteConfigGroup(TestAuthenticationFactory.createClusterOperator());
}
@Test
public void testDeleteConfigGroupAsServiceAdministrator() throws Exception {
testDeleteConfigGroup(TestAuthenticationFactory.createServiceAdministrator());
}
@Test(expected = AuthorizationException.class)
public void testDeleteConfigGroupAsServiceOperator() throws Exception {
testDeleteConfigGroup(TestAuthenticationFactory.createServiceOperator());
}
@Test(expected = AuthorizationException.class)
public void testDeleteConfigGroupAsClusterUser() throws Exception {
testDeleteConfigGroup(TestAuthenticationFactory.createClusterUser());
}
private void testDeleteConfigGroup(Authentication authentication) throws Exception {
AmbariManagementController managementController = createMock(AmbariManagementController.class);
Clusters clusters = createNiceMock(Clusters.class);
Cluster cluster = createNiceMock(Cluster.class);
ConfigGroup configGroup = createNiceMock(ConfigGroup.class);
expect(managementController.getAuthName()).andReturn("admin").anyTimes();
expect(managementController.getClusters()).andReturn(clusters).anyTimes();
expect(clusters.getCluster("Cluster100")).andReturn(cluster).anyTimes();
expect(cluster.getConfigGroups()).andReturn(Collections.singletonMap(1L, configGroup));
cluster.deleteConfigGroup(1L);
replay(managementController, clusters, cluster, configGroup);
    ResourceProvider resourceProvider = getConfigGroupResourceProvider(managementController);
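    // Register an observer so the delete can be verified through the
    // ResourceProviderEvent it publishes.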
AbstractResourceProviderTest.TestObserver observer = new AbstractResourceProviderTest.TestObserver();
((ObservableResourceProvider) resourceProvider).addObserver(observer);
    Predicate predicate = new PredicateBuilder()
        .property(ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100")
        .and()
        .property(ConfigGroupResourceProvider.CONFIGGROUP_ID_PROPERTY_ID).equals(1L)
        .toPredicate();
SecurityContextHolder.getContext().setAuthentication(authentication);
resourceProvider.deleteResources(new RequestImpl(null, null, null, null), predicate);
ResourceProviderEvent lastEvent = observer.getLastEvent();
Assert.assertNotNull(lastEvent);
Assert.assertEquals(Resource.Type.ConfigGroup, lastEvent.getResourceType());
Assert.assertEquals(ResourceProviderEvent.Type.Delete, lastEvent.getType());
Assert.assertEquals(predicate, lastEvent.getPredicate());
Assert.assertNull(lastEvent.getRequest());
verify(managementController, clusters, cluster, configGroup);
}
@Test
public void testGetConfigGroupRequest_populatesConfigAttributes() throws Exception {
AmbariManagementController managementController = createMock(AmbariManagementController.class);
    ConfigGroupResourceProvider resourceProvider = getConfigGroupResourceProvider(managementController);
Set<Map<String, String>> desiredConfigProperties = new HashSet<Map<String, String>>();
Map<String, String> desiredConfig1 = new HashMap<String, String>();
desiredConfig1.put("tag", "version2");
desiredConfig1.put("type", "type1");
desiredConfig1.put("properties/key1", "value1");
desiredConfig1.put("properties/key2", "value2");
desiredConfig1.put("properties_attributes/attr1/key1", "true");
desiredConfig1.put("properties_attributes/attr1/key2", "false");
desiredConfig1.put("properties_attributes/attr2/key1", "15");
desiredConfigProperties.add(desiredConfig1);
Map<String, Object> properties = new HashMap<String, Object>();
properties.put("ConfigGroup/hosts", new HashMap<String, String>() {{
put("host_name", "ambari1");
}});
properties.put("ConfigGroup/cluster_name", "c");
properties.put("ConfigGroup/desired_configs", desiredConfigProperties);
ConfigGroupRequest request = resourceProvider.getConfigGroupRequest(properties);
assertNotNull(request);
Map<String, Config> configMap = request.getConfigs();
assertNotNull(configMap);
assertEquals(1, configMap.size());
assertTrue(configMap.containsKey("type1"));
Config config = configMap.get("type1");
assertEquals("type1", config.getType());
Map<String, String> configProperties = config.getProperties();
assertNotNull(configProperties);
assertEquals(2, configProperties.size());
assertEquals("value1", configProperties.get("key1"));
assertEquals("value2", configProperties.get("key2"));
Map<String, Map<String, String>> configAttributes = config.getPropertiesAttributes();
assertNotNull(configAttributes);
assertEquals(2, configAttributes.size());
assertTrue(configAttributes.containsKey("attr1"));
Map<String, String> attr1 = configAttributes.get("attr1");
assertNotNull(attr1);
assertEquals(2, attr1.size());
assertEquals("true", attr1.get("key1"));
assertEquals("false", attr1.get("key2"));
assertTrue(configAttributes.containsKey("attr2"));
Map<String, String> attr2 = configAttributes.get("attr2");
assertNotNull(attr2);
assertEquals(1, attr2.size());
assertEquals("15", attr2.get("key1"));
}
}
| ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProviderTest.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.controller.internal;
import com.google.inject.Binder;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Module;
import com.google.inject.util.Modules;
import org.apache.ambari.server.controller.AmbariManagementController;
import org.apache.ambari.server.controller.ConfigGroupRequest;
import org.apache.ambari.server.controller.ConfigGroupResponse;
import org.apache.ambari.server.controller.RequestStatusResponse;
import org.apache.ambari.server.controller.spi.NoSuchResourceException;
import org.apache.ambari.server.controller.spi.Predicate;
import org.apache.ambari.server.controller.spi.Request;
import org.apache.ambari.server.controller.spi.Resource;
import org.apache.ambari.server.controller.spi.ResourceAlreadyExistsException;
import org.apache.ambari.server.controller.spi.ResourceProvider;
import org.apache.ambari.server.controller.spi.SystemException;
import org.apache.ambari.server.controller.utilities.PredicateBuilder;
import org.apache.ambari.server.controller.utilities.PropertyHelper;
import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
import org.apache.ambari.server.orm.dao.HostDAO;
import org.apache.ambari.server.orm.entities.HostEntity;
import org.apache.ambari.server.security.TestAuthenticationFactory;
import org.apache.ambari.server.security.authorization.AuthorizationException;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
import org.apache.ambari.server.state.ConfigHelper;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.configgroup.ConfigGroup;
import org.apache.ambari.server.state.configgroup.ConfigGroupFactory;
import org.easymock.Capture;
import org.easymock.IAnswer;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static junit.framework.Assert.*;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.capture;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.createNiceMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.easymock.EasyMock.createStrictMock;
public class ConfigGroupResourceProviderTest {
private Injector injector;
private HostDAO hostDAO = null;
@BeforeClass
public static void setupAuthentication() {
// Clear authenticated user so that authorization checks will pass
SecurityContextHolder.getContext().setAuthentication(null);
}
@Before
public void setup() throws Exception {
hostDAO = createStrictMock(HostDAO.class);
// Create injector after all mocks have been initialized
injector = Guice.createInjector(Modules.override(
new InMemoryDefaultTestModule()).with(new MockModule()));
}
ConfigGroupResourceProvider getConfigGroupResourceProvider
(AmbariManagementController managementController) {
Resource.Type type = Resource.Type.ConfigGroup;
return (ConfigGroupResourceProvider) AbstractControllerResourceProvider.getResourceProvider(
type,
PropertyHelper.getPropertyIds(type),
PropertyHelper.getKeyPropertyIds(type),
managementController);
}
private class MockModule implements Module {
@Override
public void configure(Binder binder) {
binder.bind(HostDAO.class).toInstance(hostDAO);
}
}
@Test
public void testCreateConfigGroupAsAmbariAdministrator() throws Exception {
testCreateConfigGroup(TestAuthenticationFactory.createAdministrator());
}
@Test
public void testCreateConfigGroupAsClusterAdministrator() throws Exception {
testCreateConfigGroup(TestAuthenticationFactory.createClusterAdministrator());
}
@Test
public void testCreateConfigGroupAsClusterOperator() throws Exception {
testCreateConfigGroup(TestAuthenticationFactory.createClusterOperator());
}
@Test
public void testCreateConfigGroupAsServiceAdministrator() throws Exception {
testCreateConfigGroup(TestAuthenticationFactory.createServiceAdministrator());
}
@Test(expected = AuthorizationException.class)
public void testCreateConfigGroupAsServiceOperator() throws Exception {
testCreateConfigGroup(TestAuthenticationFactory.createServiceOperator());
}
@Test(expected = AuthorizationException.class)
public void testCreateConfigGroupAsClusterUser() throws Exception {
testCreateConfigGroup(TestAuthenticationFactory.createClusterUser());
}
private void testCreateConfigGroup(Authentication authentication) throws Exception {
AmbariManagementController managementController = createMock(AmbariManagementController.class);
RequestStatusResponse response = createNiceMock(RequestStatusResponse.class);
Clusters clusters = createNiceMock(Clusters.class);
Cluster cluster = createNiceMock(Cluster.class);
Host h1 = createNiceMock(Host.class);
Host h2 = createNiceMock(Host.class);
HostEntity hostEntity1 = createMock(HostEntity.class);
HostEntity hostEntity2 = createMock(HostEntity.class);
ConfigGroupFactory configGroupFactory = createNiceMock(ConfigGroupFactory.class);
ConfigGroup configGroup = createNiceMock(ConfigGroup.class);
expect(managementController.getClusters()).andReturn(clusters).anyTimes();
expect(clusters.getCluster("Cluster100")).andReturn(cluster).anyTimes();
expect(clusters.getHost("h1")).andReturn(h1);
expect(clusters.getHost("h2")).andReturn(h2);
expect(managementController.getConfigGroupFactory()).andReturn(configGroupFactory);
expect(managementController.getAuthName()).andReturn("admin").anyTimes();
expect(hostDAO.findByName("h1")).andReturn(hostEntity1).atLeastOnce();
expect(hostDAO.findByName("h2")).andReturn(hostEntity2).atLeastOnce();
expect(hostEntity1.getHostId()).andReturn(1L).atLeastOnce();
expect(hostEntity2.getHostId()).andReturn(2L).atLeastOnce();
Capture<Cluster> clusterCapture = new Capture<Cluster>();
Capture<String> captureName = new Capture<String>();
Capture<String> captureDesc = new Capture<String>();
Capture<String> captureTag = new Capture<String>();
Capture<Map<String, Config>> captureConfigs = new Capture<Map<String,
Config>>();
Capture<Map<Long, Host>> captureHosts = new Capture<Map<Long, Host>>();
expect(configGroupFactory.createNew(capture(clusterCapture),
capture(captureName), capture(captureTag), capture(captureDesc),
capture(captureConfigs), capture(captureHosts))).andReturn(configGroup);
replay(managementController, clusters, cluster, configGroupFactory,
configGroup, response, hostDAO, hostEntity1, hostEntity2);
ResourceProvider provider = getConfigGroupResourceProvider
(managementController);
Set<Map<String, Object>> propertySet = new LinkedHashSet<Map<String, Object>>();
Map<String, Object> properties = new LinkedHashMap<String, Object>();
Set<Map<String, Object>> hostSet = new HashSet<Map<String, Object>>();
Map<String, Object> host1 = new HashMap<String, Object>();
host1.put(ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID, "h1");
hostSet.add(host1);
Map<String, Object> host2 = new HashMap<String, Object>();
host2.put(ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID, "h2");
hostSet.add(host2);
Set<Map<String, Object>> configSet = new HashSet<Map<String, Object>>();
Map<String, String> configMap = new HashMap<String, String>();
Map<String, Object> configs = new HashMap<String, Object>();
configs.put("type", "core-site");
configs.put("tag", "version100");
configMap.put("key1", "value1");
configs.put("properties", configMap);
configSet.add(configs);
properties.put(ConfigGroupResourceProvider
.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID, "Cluster100");
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_NAME_PROPERTY_ID,
"test-1");
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID,
"tag-1");
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_HOSTS_PROPERTY_ID,
hostSet);
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_CONFIGS_PROPERTY_ID,
configSet);
propertySet.add(properties);
Request request = PropertyHelper.getCreateRequest(propertySet, null);
SecurityContextHolder.getContext().setAuthentication(authentication);
provider.createResources(request);
verify(managementController, clusters, cluster, configGroupFactory,
configGroup, response, hostDAO, hostEntity1, hostEntity2);
assertEquals("version100", captureConfigs.getValue().get("core-site")
.getTag());
assertTrue(captureHosts.getValue().containsKey(1L));
assertTrue(captureHosts.getValue().containsKey(2L));
}
@Test
public void testDuplicateNameConfigGroupAsAmbariAdministrator() throws Exception {
testDuplicateNameConfigGroup(TestAuthenticationFactory.createAdministrator());
}
@Test
public void testDuplicateNameConfigGroupAsClusterAdministrator() throws Exception {
testDuplicateNameConfigGroup(TestAuthenticationFactory.createClusterAdministrator());
}
@Test
public void testDuplicateNameConfigGroupAsClusterOperator() throws Exception {
testDuplicateNameConfigGroup(TestAuthenticationFactory.createClusterOperator());
}
@Test
public void testDuplicateNameConfigGroupAsServiceAdministrator() throws Exception {
testDuplicateNameConfigGroup(TestAuthenticationFactory.createServiceAdministrator());
}
@Test(expected = AuthorizationException.class)
public void testDuplicateNameConfigGroupAsServiceOperator() throws Exception {
testDuplicateNameConfigGroup(TestAuthenticationFactory.createServiceOperator());
}
@Test(expected = AuthorizationException.class)
public void testDuplicateNameConfigGroupAsClusterUser() throws Exception {
testDuplicateNameConfigGroup(TestAuthenticationFactory.createClusterUser());
}
private void testDuplicateNameConfigGroup(Authentication authentication) throws Exception {
AmbariManagementController managementController = createMock(AmbariManagementController.class);
RequestStatusResponse response = createNiceMock(RequestStatusResponse.class);
Clusters clusters = createNiceMock(Clusters.class);
Cluster cluster = createNiceMock(Cluster.class);
ConfigGroupFactory configGroupFactory = createNiceMock(ConfigGroupFactory.class);
ConfigGroup configGroup = createNiceMock(ConfigGroup.class);
Map<Long, ConfigGroup> configGroupMap = new HashMap<Long, ConfigGroup>();
configGroupMap.put(1L, configGroup);
expect(managementController.getClusters()).andReturn(clusters).anyTimes();
expect(clusters.getCluster("Cluster100")).andReturn(cluster).anyTimes();
expect(managementController.getConfigGroupFactory()).andReturn
(configGroupFactory).anyTimes();
expect(managementController.getAuthName()).andReturn("admin").anyTimes();
expect(cluster.getConfigGroups()).andReturn(configGroupMap);
expect(configGroupFactory.createNew((Cluster) anyObject(), (String) anyObject(),
(String) anyObject(), (String) anyObject(), (HashMap) anyObject(),
(HashMap) anyObject())).andReturn(configGroup).anyTimes();
expect(configGroup.getClusterName()).andReturn("Cluster100").anyTimes();
expect(configGroup.getName()).andReturn("test-1").anyTimes();
expect(configGroup.getTag()).andReturn("tag-1").anyTimes();
replay(managementController, clusters, cluster, configGroupFactory,
configGroup, response);
ResourceProvider provider = getConfigGroupResourceProvider
(managementController);
Map<String, Object> properties = new LinkedHashMap<String, Object>();
Set<Map<String, Object>> propertySet = new LinkedHashSet<Map<String, Object>>();
properties.put(ConfigGroupResourceProvider
.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID, "Cluster100");
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_NAME_PROPERTY_ID,
"test-1");
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID,
"tag-1");
propertySet.add(properties);
Request request = PropertyHelper.getCreateRequest(propertySet, null);
SecurityContextHolder.getContext().setAuthentication(authentication);
Exception exception = null;
try {
provider.createResources(request);
} catch (AuthorizationException e){
throw e;
} catch (Exception e) {
exception = e;
}
verify(managementController, clusters, cluster, configGroupFactory,
configGroup, response);
assertNotNull(exception);
assertTrue(exception instanceof ResourceAlreadyExistsException);
}
@Test
public void testUpdateConfigGroupWithWrongConfigType() throws Exception {
AmbariManagementController managementController = createMock(AmbariManagementController.class);
RequestStatusResponse response = createNiceMock(RequestStatusResponse.class);
ConfigHelper configHelper = createNiceMock(ConfigHelper.class);
Clusters clusters = createNiceMock(Clusters.class);
Cluster cluster = createNiceMock(Cluster.class);
Host h1 = createNiceMock(Host.class);
Host h2 = createNiceMock(Host.class);
HostEntity hostEntity1 = createMock(HostEntity.class);
HostEntity hostEntity2 = createMock(HostEntity.class);
final ConfigGroup configGroup = createNiceMock(ConfigGroup.class);
ConfigGroupResponse configGroupResponse = createNiceMock
(ConfigGroupResponse.class);
expect(cluster.isConfigTypeExists("core-site")).andReturn(false).anyTimes();
expect(managementController.getClusters()).andReturn(clusters).anyTimes();
expect(managementController.getAuthName()).andReturn("admin").anyTimes();
expect(clusters.getCluster("Cluster100")).andReturn(cluster).anyTimes();
expect(clusters.getHost("h1")).andReturn(h1);
expect(clusters.getHost("h2")).andReturn(h2);
expect(hostDAO.findByName("h1")).andReturn(hostEntity1).anyTimes();
expect(hostDAO.findById(1L)).andReturn(hostEntity1).anyTimes();
expect(hostDAO.findByName("h2")).andReturn(hostEntity2).anyTimes();
expect(hostDAO.findById(2L)).andReturn(hostEntity2).anyTimes();
expect(hostEntity1.getHostId()).andReturn(1L).atLeastOnce();
expect(hostEntity2.getHostId()).andReturn(2L).atLeastOnce();
expect(h1.getHostId()).andReturn(1L).anyTimes();
expect(h2.getHostId()).andReturn(2L).anyTimes();
expect(configGroup.getName()).andReturn("test-1").anyTimes();
expect(configGroup.getId()).andReturn(25L).anyTimes();
expect(configGroup.getTag()).andReturn("tag-1").anyTimes();
expect(configGroup.convertToResponse()).andReturn(configGroupResponse).anyTimes();
expect(configGroupResponse.getClusterName()).andReturn("Cluster100").anyTimes();
expect(configGroupResponse.getId()).andReturn(25L).anyTimes();
expect(cluster.getConfigGroups()).andStubAnswer(new IAnswer<Map<Long, ConfigGroup>>() {
@Override
public Map<Long, ConfigGroup> answer() throws Throwable {
Map<Long, ConfigGroup> configGroupMap = new HashMap<Long, ConfigGroup>();
configGroupMap.put(configGroup.getId(), configGroup);
return configGroupMap;
}
});
replay(managementController, clusters, cluster,
configGroup, response, configGroupResponse, configHelper, hostDAO, hostEntity1, hostEntity2, h1, h2);
ResourceProvider provider = getConfigGroupResourceProvider
(managementController);
Map<String, Object> properties = new LinkedHashMap<String, Object>();
Set<Map<String, Object>> hostSet = new HashSet<Map<String, Object>>();
Map<String, Object> host1 = new HashMap<String, Object>();
host1.put(ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID, "h1");
hostSet.add(host1);
Map<String, Object> host2 = new HashMap<String, Object>();
host2.put(ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID, "h2");
hostSet.add(host2);
Set<Map<String, Object>> configSet = new HashSet<Map<String, Object>>();
Map<String, String> configMap = new HashMap<String, String>();
Map<String, Object> configs = new HashMap<String, Object>();
configs.put("type", "core-site");
configs.put("tag", "version100");
configMap.put("key1", "value1");
configs.put("properties", configMap);
configSet.add(configs);
properties.put(ConfigGroupResourceProvider
.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID, "Cluster100");
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_NAME_PROPERTY_ID,
"test-1");
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID,
"tag-1");
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_HOSTS_PROPERTY_ID,
hostSet );
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_CONFIGS_PROPERTY_ID,
configSet);
Map<String, String> mapRequestProps = new HashMap<String, String>();
mapRequestProps.put("context", "Called from a test");
Request request = PropertyHelper.getUpdateRequest(properties, mapRequestProps);
Predicate predicate = new PredicateBuilder().property
(ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals
("Cluster100").and().
property(ConfigGroupResourceProvider.CONFIGGROUP_ID_PROPERTY_ID).equals
(25L).toPredicate();
SecurityContextHolder.getContext().setAuthentication(TestAuthenticationFactory.createAdministrator());
SystemException systemException = null;
try {
provider.updateResources(request, predicate);
}
catch (SystemException e){
systemException = e;
}
assertNotNull(systemException);
verify(managementController, clusters, cluster,
configGroup, response, configGroupResponse, configHelper, hostDAO, hostEntity1, hostEntity2, h1, h2);
}
@Test
public void testUpdateConfigGroupAsAmbariAdministrator() throws Exception {
testUpdateConfigGroup(TestAuthenticationFactory.createAdministrator());
}
@Test
public void testUpdateConfigGroupAsClusterAdministrator() throws Exception {
testUpdateConfigGroup(TestAuthenticationFactory.createClusterAdministrator());
}
@Test
public void testUpdateConfigGroupAsClusterOperator() throws Exception {
testUpdateConfigGroup(TestAuthenticationFactory.createClusterOperator());
}
@Test
public void testUpdateConfigGroupAsServiceAdministrator() throws Exception {
testUpdateConfigGroup(TestAuthenticationFactory.createServiceAdministrator());
}
@Test(expected = AuthorizationException.class)
public void testUpdateConfigGroupAsServiceOperator() throws Exception {
testUpdateConfigGroup(TestAuthenticationFactory.createServiceOperator());
}
@Test(expected = AuthorizationException.class)
public void testUpdateConfigGroupAsClusterUser() throws Exception {
testUpdateConfigGroup(TestAuthenticationFactory.createClusterUser());
}
private void testUpdateConfigGroup(Authentication authentication) throws Exception {
AmbariManagementController managementController = createMock(AmbariManagementController.class);
RequestStatusResponse response = createNiceMock(RequestStatusResponse.class);
ConfigHelper configHelper = createNiceMock(ConfigHelper.class);
Clusters clusters = createNiceMock(Clusters.class);
Cluster cluster = createNiceMock(Cluster.class);
Host h1 = createNiceMock(Host.class);
Host h2 = createNiceMock(Host.class);
HostEntity hostEntity1 = createMock(HostEntity.class);
HostEntity hostEntity2 = createMock(HostEntity.class);
final ConfigGroup configGroup = createNiceMock(ConfigGroup.class);
ConfigGroupResponse configGroupResponse = createNiceMock
(ConfigGroupResponse.class);
expect(cluster.isConfigTypeExists("core-site")).andReturn(true).anyTimes();
expect(managementController.getClusters()).andReturn(clusters).anyTimes();
expect(managementController.getAuthName()).andReturn("admin").anyTimes();
expect(clusters.getCluster("Cluster100")).andReturn(cluster).anyTimes();
expect(clusters.getHost("h1")).andReturn(h1);
expect(clusters.getHost("h2")).andReturn(h2);
expect(hostDAO.findByName("h1")).andReturn(hostEntity1).anyTimes();
expect(hostDAO.findById(1L)).andReturn(hostEntity1).anyTimes();
expect(hostDAO.findByName("h2")).andReturn(hostEntity2).anyTimes();
expect(hostDAO.findById(2L)).andReturn(hostEntity2).anyTimes();
expect(hostEntity1.getHostId()).andReturn(1L).atLeastOnce();
expect(hostEntity2.getHostId()).andReturn(2L).atLeastOnce();
expect(h1.getHostId()).andReturn(1L).anyTimes();
expect(h2.getHostId()).andReturn(2L).anyTimes();
expect(configGroup.getName()).andReturn("test-1").anyTimes();
expect(configGroup.getId()).andReturn(25L).anyTimes();
expect(configGroup.getTag()).andReturn("tag-1").anyTimes();
expect(configGroup.convertToResponse()).andReturn(configGroupResponse).anyTimes();
expect(configGroupResponse.getClusterName()).andReturn("Cluster100").anyTimes();
expect(configGroupResponse.getId()).andReturn(25L).anyTimes();
expect(cluster.getConfigGroups()).andStubAnswer(new IAnswer<Map<Long, ConfigGroup>>() {
@Override
public Map<Long, ConfigGroup> answer() throws Throwable {
Map<Long, ConfigGroup> configGroupMap = new HashMap<Long, ConfigGroup>();
configGroupMap.put(configGroup.getId(), configGroup);
return configGroupMap;
}
});
replay(managementController, clusters, cluster,
configGroup, response, configGroupResponse, configHelper, hostDAO, hostEntity1, hostEntity2, h1, h2);
ResourceProvider provider = getConfigGroupResourceProvider
(managementController);
Map<String, Object> properties = new LinkedHashMap<String, Object>();
Set<Map<String, Object>> hostSet = new HashSet<Map<String, Object>>();
Map<String, Object> host1 = new HashMap<String, Object>();
host1.put(ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID, "h1");
hostSet.add(host1);
Map<String, Object> host2 = new HashMap<String, Object>();
host2.put(ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID, "h2");
hostSet.add(host2);
Set<Map<String, Object>> configSet = new HashSet<Map<String, Object>>();
Map<String, String> configMap = new HashMap<String, String>();
Map<String, Object> configs = new HashMap<String, Object>();
configs.put("type", "core-site");
configs.put("tag", "version100");
configMap.put("key1", "value1");
configs.put("properties", configMap);
configSet.add(configs);
properties.put(ConfigGroupResourceProvider
.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID, "Cluster100");
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_NAME_PROPERTY_ID,
"test-1");
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID,
"tag-1");
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_HOSTS_PROPERTY_ID,
hostSet );
properties.put(ConfigGroupResourceProvider.CONFIGGROUP_CONFIGS_PROPERTY_ID,
configSet);
Map<String, String> mapRequestProps = new HashMap<String, String>();
mapRequestProps.put("context", "Called from a test");
Request request = PropertyHelper.getUpdateRequest(properties, mapRequestProps);
Predicate predicate = new PredicateBuilder().property
(ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals
("Cluster100").and().
property(ConfigGroupResourceProvider.CONFIGGROUP_ID_PROPERTY_ID).equals
(25L).toPredicate();
SecurityContextHolder.getContext().setAuthentication(authentication);
provider.updateResources(request, predicate);
verify(managementController, clusters, cluster,
configGroup, response, configGroupResponse, configHelper, hostDAO, hostEntity1, hostEntity2, h1, h2);
}
@Test
public void testGetConfigGroupAsAmbariAdministrator() throws Exception {
testGetConfigGroup(TestAuthenticationFactory.createAdministrator());
}
@Test
public void testGetConfigGroupAsClusterAdministrator() throws Exception {
testGetConfigGroup(TestAuthenticationFactory.createClusterAdministrator());
}
@Test
public void testGetConfigGroupAsClusterOperator() throws Exception {
testGetConfigGroup(TestAuthenticationFactory.createClusterOperator());
}
@Test
public void testGetConfigGroupAsServiceAdministrator() throws Exception {
testGetConfigGroup(TestAuthenticationFactory.createServiceAdministrator());
}
@Test
public void testGetConfigGroupAsServiceOperator() throws Exception {
testGetConfigGroup(TestAuthenticationFactory.createServiceOperator());
}
@Test
public void testGetConfigGroupAsClusterUser() throws Exception {
testGetConfigGroup(TestAuthenticationFactory.createClusterUser());
}
@SuppressWarnings("unchecked")
private void testGetConfigGroup(Authentication authentication) throws Exception {
AmbariManagementController managementController = createMock(AmbariManagementController.class);
Clusters clusters = createNiceMock(Clusters.class);
Cluster cluster = createNiceMock(Cluster.class);
Host h1 = createNiceMock(Host.class);
final Long host1Id = 1L;
List<Long> hostIds = new ArrayList<Long>() {{ add(host1Id); }};
List<String> hostNames = new ArrayList<String>() {{ add("h1"); }};
HostEntity hostEntity1 = createMock(HostEntity.class);
expect(hostDAO.getHostNamesByHostIds(hostIds)).andReturn(hostNames).atLeastOnce();
expect(hostDAO.findByName("h1")).andReturn(hostEntity1).anyTimes();
expect(hostEntity1.getHostId()).andReturn(host1Id).anyTimes();
ConfigGroup configGroup1 = createNiceMock(ConfigGroup.class);
ConfigGroup configGroup2 = createNiceMock(ConfigGroup.class);
ConfigGroup configGroup3 = createNiceMock(ConfigGroup.class);
ConfigGroup configGroup4 = createNiceMock(ConfigGroup.class);
ConfigGroupResponse response1 = createNiceMock(ConfigGroupResponse.class);
ConfigGroupResponse response2 = createNiceMock(ConfigGroupResponse.class);
ConfigGroupResponse response3 = createNiceMock(ConfigGroupResponse.class);
ConfigGroupResponse response4 = createNiceMock(ConfigGroupResponse.class);
Map<Long, ConfigGroup> configGroupMap = new HashMap<Long, ConfigGroup>();
configGroupMap.put(1L, configGroup1);
configGroupMap.put(2L, configGroup2);
configGroupMap.put(3L, configGroup3);
configGroupMap.put(4L, configGroup4);
Map<Long, ConfigGroup> configGroupByHostname = new HashMap<Long, ConfigGroup>();
configGroupByHostname.put(4L, configGroup4);
expect(configGroup1.convertToResponse()).andReturn(response1).anyTimes();
expect(configGroup2.convertToResponse()).andReturn(response2).anyTimes();
expect(configGroup3.convertToResponse()).andReturn(response3).anyTimes();
expect(configGroup4.convertToResponse()).andReturn(response4).anyTimes();
expect(managementController.getClusters()).andReturn(clusters).anyTimes();
expect(clusters.getCluster("Cluster100")).andReturn(cluster).anyTimes();
expect(cluster.getConfigGroups()).andReturn(configGroupMap).anyTimes();
expect(cluster.getClusterName()).andReturn("Cluster100").anyTimes();
expect(configGroup1.getName()).andReturn("g1").anyTimes();
expect(configGroup2.getName()).andReturn("g2").anyTimes();
expect(configGroup3.getName()).andReturn("g3").anyTimes();
expect(configGroup4.getName()).andReturn("g4").anyTimes();
expect(configGroup1.getTag()).andReturn("t1").anyTimes();
expect(configGroup2.getTag()).andReturn("t2").anyTimes();
expect(configGroup3.getTag()).andReturn("t3").anyTimes();
expect(configGroup4.getTag()).andReturn("t4").anyTimes();
Map<Long, Host> hostMap = new HashMap<Long, Host>();
hostMap.put(host1Id, h1);
expect(configGroup4.getHosts()).andReturn(hostMap).anyTimes();
expect(response1.getClusterName()).andReturn("Cluster100").anyTimes();
expect(response2.getClusterName()).andReturn("Cluster100").anyTimes();
expect(response3.getClusterName()).andReturn("Cluster100").anyTimes();
expect(response4.getClusterName()).andReturn("Cluster100").anyTimes();
expect(response1.getId()).andReturn(1L).anyTimes();
expect(response2.getId()).andReturn(2L).anyTimes();
expect(response3.getId()).andReturn(3L).anyTimes();
expect(response4.getId()).andReturn(4L).anyTimes();
expect(response2.getGroupName()).andReturn("g2").anyTimes();
expect(response3.getTag()).andReturn("t3").anyTimes();
expect(cluster.getConfigGroupsByHostname("h1")).andReturn(configGroupByHostname).anyTimes();
Set<Map<String, Object>> hostObj = new HashSet<Map<String, Object>>();
Map<String, Object> hostnames = new HashMap<String, Object>();
hostnames.put("host_name", "h1");
hostObj.add(hostnames);
expect(response4.getHosts()).andReturn(hostObj).anyTimes();
replay(managementController, clusters, cluster, hostDAO, hostEntity1,
configGroup1, configGroup2, configGroup3, configGroup4, response1, response2, response3, response4);
ResourceProvider resourceProvider = getConfigGroupResourceProvider
(managementController);
Set<String> propertyIds = new HashSet<String>();
propertyIds.add(ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID);
propertyIds.add(ConfigGroupResourceProvider.CONFIGGROUP_ID_PROPERTY_ID);
// Read all
Predicate predicate = new PredicateBuilder().property
(ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID)
.equals("Cluster100").toPredicate();
Request request = PropertyHelper.getReadRequest(propertyIds);
Set<Resource> resources = resourceProvider.getResources(request,
predicate);
assertEquals(4, resources.size());
// Read by id
predicate = new PredicateBuilder().property(ConfigGroupResourceProvider
.CONFIGGROUP_ID_PROPERTY_ID).equals(1L).and().property
(ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID)
.equals("Cluster100").toPredicate();
SecurityContextHolder.getContext().setAuthentication(authentication);
resources = resourceProvider.getResources(request, predicate);
assertEquals(1, resources.size());
assertEquals(1L, resources.iterator().next().getPropertyValue
(ConfigGroupResourceProvider.CONFIGGROUP_ID_PROPERTY_ID));
// Read by Name
predicate = new PredicateBuilder().property(ConfigGroupResourceProvider
.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100").and()
.property(ConfigGroupResourceProvider.CONFIGGROUP_NAME_PROPERTY_ID)
.equals("g2").toPredicate();
resources = resourceProvider.getResources(request, predicate);
assertEquals(1, resources.size());
assertEquals("g2", resources.iterator().next().getPropertyValue
(ConfigGroupResourceProvider.CONFIGGROUP_NAME_PROPERTY_ID));
// Read by tag
predicate = new PredicateBuilder().property(ConfigGroupResourceProvider
.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100").and()
.property(ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID)
.equals("t3").toPredicate();
resources = resourceProvider.getResources(request, predicate);
assertEquals(1, resources.size());
assertEquals("t3", resources.iterator().next().getPropertyValue
(ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID));
// Read by hostname (hosts=h1)
predicate = new PredicateBuilder().property(ConfigGroupResourceProvider
.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100").and()
.property(ConfigGroupResourceProvider.CONFIGGROUP_HOSTS_PROPERTY_ID)
.equals("h1").toPredicate();
resources = resourceProvider.getResources(request, predicate);
assertEquals(1, resources.size());
Set<Map<String, Object>> hostSet = (Set<Map<String, Object>>)
resources.iterator().next()
.getPropertyValue(ConfigGroupResourceProvider
.CONFIGGROUP_HOSTS_PROPERTY_ID);
assertEquals("h1", hostSet.iterator().next().get
(ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID));
// Read by hostname (hosts/host_name=h1)
predicate = new PredicateBuilder().property(ConfigGroupResourceProvider
.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100").and()
.property(ConfigGroupResourceProvider.CONFIGGROUP_HOSTS_HOSTNAME_PROPERTY_ID)
.equals("h1").toPredicate();
resources = resourceProvider.getResources(request, predicate);
assertEquals(1, resources.size());
hostSet = (Set<Map<String, Object>>)
resources.iterator().next()
.getPropertyValue(ConfigGroupResourceProvider
.CONFIGGROUP_HOSTS_PROPERTY_ID);
assertEquals("h1", hostSet.iterator().next().get
(ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID));
// Read by tag and hostname (hosts=h1) - Positive
predicate = new PredicateBuilder().property(ConfigGroupResourceProvider
.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100").and()
.property(ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID)
.equals("t4").and().property(ConfigGroupResourceProvider
.CONFIGGROUP_HOSTS_PROPERTY_ID).equals(host1Id).toPredicate();
resources = resourceProvider.getResources(request, predicate);
assertEquals(1, resources.size());
hostSet = (Set<Map<String, Object>>)
resources.iterator().next()
.getPropertyValue(ConfigGroupResourceProvider
.CONFIGGROUP_HOSTS_PROPERTY_ID);
assertEquals("h1", hostSet.iterator().next().get
(ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID));
// Read by tag and hostname (hosts/host_name=h1) - Positive
predicate = new PredicateBuilder().property(ConfigGroupResourceProvider
.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100").and()
.property(ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID)
.equals("t4").and().property(ConfigGroupResourceProvider
.CONFIGGROUP_HOSTS_HOSTNAME_PROPERTY_ID).equals("h1").toPredicate();
resources = resourceProvider.getResources(request, predicate);
assertEquals(1, resources.size());
hostSet = (Set<Map<String, Object>>)
resources.iterator().next()
.getPropertyValue(ConfigGroupResourceProvider
.CONFIGGROUP_HOSTS_PROPERTY_ID);
assertEquals("h1", hostSet.iterator().next().get
(ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID));
// Read by id
predicate = new PredicateBuilder().property(ConfigGroupResourceProvider
.CONFIGGROUP_ID_PROPERTY_ID).equals(11L).and().property
(ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID)
.equals("Cluster100").toPredicate();
NoSuchResourceException resourceException = null;
try {
resourceProvider.getResources(request, predicate);
} catch (NoSuchResourceException ce) {
resourceException = ce;
}
Assert.assertNotNull(resourceException);
verify(managementController, clusters, cluster, hostDAO, hostEntity1,
configGroup1, configGroup2, configGroup3, configGroup4, response1, response2, response3, response4);
}
@Test
public void testDeleteConfigGroupAsAmbariAdministrator() throws Exception {
testDeleteConfigGroup(TestAuthenticationFactory.createAdministrator());
}
@Test
public void testDeleteConfigGroupAsClusterAdministrator() throws Exception {
testDeleteConfigGroup(TestAuthenticationFactory.createClusterAdministrator());
}
@Test
public void testDeleteConfigGroupAsClusterOperator() throws Exception {
testDeleteConfigGroup(TestAuthenticationFactory.createClusterOperator());
}
@Test
public void testDeleteConfigGroupAsServiceAdministrator() throws Exception {
testDeleteConfigGroup(TestAuthenticationFactory.createServiceAdministrator());
}
@Test(expected = AuthorizationException.class)
public void testDeleteConfigGroupAsServiceOperator() throws Exception {
testDeleteConfigGroup(TestAuthenticationFactory.createServiceOperator());
}
@Test(expected = AuthorizationException.class)
public void testDeleteConfigGroupAsClusterUser() throws Exception {
testDeleteConfigGroup(TestAuthenticationFactory.createClusterUser());
}
private void testDeleteConfigGroup(Authentication authentication) throws Exception {
AmbariManagementController managementController = createMock(AmbariManagementController.class);
Clusters clusters = createNiceMock(Clusters.class);
Cluster cluster = createNiceMock(Cluster.class);
ConfigGroup configGroup = createNiceMock(ConfigGroup.class);
expect(managementController.getAuthName()).andReturn("admin").anyTimes();
expect(managementController.getClusters()).andReturn(clusters).anyTimes();
expect(clusters.getCluster("Cluster100")).andReturn(cluster).anyTimes();
expect(cluster.getConfigGroups()).andReturn(Collections.singletonMap(1L, configGroup));
cluster.deleteConfigGroup(1L);
replay(managementController, clusters, cluster, configGroup);
ResourceProvider resourceProvider = getConfigGroupResourceProvider
(managementController);
AbstractResourceProviderTest.TestObserver observer = new AbstractResourceProviderTest.TestObserver();
((ObservableResourceProvider) resourceProvider).addObserver(observer);
Predicate predicate = new PredicateBuilder().property
(ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID)
.equals("Cluster100").and().property(ConfigGroupResourceProvider
.CONFIGGROUP_ID_PROPERTY_ID).equals(1L).toPredicate();
SecurityContextHolder.getContext().setAuthentication(authentication);
resourceProvider.deleteResources(new RequestImpl(null, null, null, null), predicate);
ResourceProviderEvent lastEvent = observer.getLastEvent();
Assert.assertNotNull(lastEvent);
Assert.assertEquals(Resource.Type.ConfigGroup, lastEvent.getResourceType());
Assert.assertEquals(ResourceProviderEvent.Type.Delete, lastEvent.getType());
Assert.assertEquals(predicate, lastEvent.getPredicate());
Assert.assertNull(lastEvent.getRequest());
verify(managementController, clusters, cluster, configGroup);
}
@Test
public void testGetConfigGroupRequest_populatesConfigAttributes() throws Exception {
AmbariManagementController managementController = createMock(AmbariManagementController.class);
ConfigGroupResourceProvider resourceProvider = getConfigGroupResourceProvider
(managementController);
Set<Map<String, String>> desiredConfigProperties = new HashSet<Map<String, String>>();
Map<String, String> desiredConfig1 = new HashMap<String, String>();
desiredConfig1.put("tag", "version2");
desiredConfig1.put("type", "type1");
desiredConfig1.put("properties/key1", "value1");
desiredConfig1.put("properties/key2", "value2");
desiredConfig1.put("properties_attributes/attr1/key1", "true");
desiredConfig1.put("properties_attributes/attr1/key2", "false");
desiredConfig1.put("properties_attributes/attr2/key1", "15");
desiredConfigProperties.add(desiredConfig1);
Map<String, Object> properties = new HashMap<String, Object>();
properties.put("ConfigGroup/hosts", new HashMap<String, String>(){{put("host_name", "ambari1");}});
properties.put("ConfigGroup/cluster_name", "c");
properties.put("ConfigGroup/desired_configs", desiredConfigProperties);
ConfigGroupRequest request = resourceProvider.getConfigGroupRequest(properties);
assertNotNull(request);
Map<String, Config> configMap = request.getConfigs();
assertNotNull(configMap);
assertEquals(1, configMap.size());
assertTrue(configMap.containsKey("type1"));
Config config = configMap.get("type1");
assertEquals("type1", config.getType());
Map<String, String> configProperties = config.getProperties();
assertNotNull(configProperties);
assertEquals(2, configProperties.size());
assertEquals("value1", configProperties.get("key1"));
assertEquals("value2", configProperties.get("key2"));
Map<String, Map<String, String>> configAttributes = config.getPropertiesAttributes();
assertNotNull(configAttributes);
assertEquals(2, configAttributes.size());
assertTrue(configAttributes.containsKey("attr1"));
Map<String, String> attr1 = configAttributes.get("attr1");
assertNotNull(attr1);
assertEquals(2, attr1.size());
assertEquals("true", attr1.get("key1"));
assertEquals("false", attr1.get("key2"));
assertTrue(configAttributes.containsKey("attr2"));
Map<String, String> attr2 = configAttributes.get("attr2");
assertNotNull(attr2);
assertEquals(1, attr2.size());
assertEquals("15", attr2.get("key1"));
}
}
| AMBARI-18481. Fix Authentication data is not available error in ConfigGroupResourceProviderTest (rlevas)
| ambari-server/src/test/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProviderTest.java | AMBARI-18481. Fix Authentication data is not available error in ConfigGroupResourceProviderTest (rlevas) | <ide><path>mbari-server/src/test/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProviderTest.java
<del>/**
<add>/*
<ide> * Licensed to the Apache Software Foundation (ASF) under one
<ide> * or more contributor license agreements. See the NOTICE file
<ide> * distributed with this work for additional information
<ide>
<ide> import com.google.inject.Binder;
<ide> import com.google.inject.Guice;
<del>import com.google.inject.Injector;
<ide> import com.google.inject.Module;
<ide> import com.google.inject.util.Modules;
<ide> import org.apache.ambari.server.controller.AmbariManagementController;
<ide> import org.easymock.IAnswer;
<ide> import org.junit.Assert;
<ide> import org.junit.Before;
<del>import org.junit.BeforeClass;
<ide> import org.junit.Test;
<ide> import org.springframework.security.core.Authentication;
<ide> import org.springframework.security.core.context.SecurityContextHolder;
<ide> import static org.easymock.EasyMock.createMock;
<ide> import static org.easymock.EasyMock.createNiceMock;
<ide> import static org.easymock.EasyMock.expect;
<del>import static org.easymock.EasyMock.expectLastCall;
<add>import static org.easymock.EasyMock.newCapture;
<ide> import static org.easymock.EasyMock.replay;
<ide> import static org.easymock.EasyMock.verify;
<ide> import static org.easymock.EasyMock.createStrictMock;
<ide>
<ide> public class ConfigGroupResourceProviderTest {
<ide>
<del> private Injector injector;
<del>
<ide> private HostDAO hostDAO = null;
<ide>
<del> @BeforeClass
<del> public static void setupAuthentication() {
<add> @Before
<add> public void setup() throws Exception {
<ide> // Clear authenticated user so that authorization checks will pass
<ide> SecurityContextHolder.getContext().setAuthentication(null);
<del> }
<del>
<del> @Before
<del> public void setup() throws Exception {
<add>
<ide> hostDAO = createStrictMock(HostDAO.class);
<ide>
<ide> // Create injector after all mocks have been initialized
<del> injector = Guice.createInjector(Modules.override(
<add> Guice.createInjector(Modules.override(
<ide> new InMemoryDefaultTestModule()).with(new MockModule()));
<ide> }
<ide>
<del> ConfigGroupResourceProvider getConfigGroupResourceProvider
<add> private ConfigGroupResourceProvider getConfigGroupResourceProvider
<ide> (AmbariManagementController managementController) {
<ide> Resource.Type type = Resource.Type.ConfigGroup;
<ide>
<ide> expect(hostEntity1.getHostId()).andReturn(1L).atLeastOnce();
<ide> expect(hostEntity2.getHostId()).andReturn(2L).atLeastOnce();
<ide>
<del> Capture<Cluster> clusterCapture = new Capture<Cluster>();
<del> Capture<String> captureName = new Capture<String>();
<del> Capture<String> captureDesc = new Capture<String>();
<del> Capture<String> captureTag = new Capture<String>();
<del> Capture<Map<String, Config>> captureConfigs = new Capture<Map<String,
<del> Config>>();
<del> Capture<Map<Long, Host>> captureHosts = new Capture<Map<Long, Host>>();
<add> Capture<Cluster> clusterCapture = newCapture();
<add> Capture<String> captureName = newCapture();
<add> Capture<String> captureDesc = newCapture();
<add> Capture<String> captureTag = newCapture();
<add> Capture<Map<String, Config>> captureConfigs = newCapture();
<add> Capture<Map<Long, Host>> captureHosts = newCapture();
<ide>
<ide> expect(configGroupFactory.createNew(capture(clusterCapture),
<del> capture(captureName), capture(captureTag), capture(captureDesc),
<del> capture(captureConfigs), capture(captureHosts))).andReturn(configGroup);
<add> capture(captureName), capture(captureTag), capture(captureDesc),
<add> capture(captureConfigs), capture(captureHosts))).andReturn(configGroup);
<ide>
<ide> replay(managementController, clusters, cluster, configGroupFactory,
<del> configGroup, response, hostDAO, hostEntity1, hostEntity2);
<add> configGroup, response, hostDAO, hostEntity1, hostEntity2);
<ide>
<ide> ResourceProvider provider = getConfigGroupResourceProvider
<del> (managementController);
<add> (managementController);
<ide>
<ide> Set<Map<String, Object>> propertySet = new LinkedHashSet<Map<String, Object>>();
<ide> Map<String, Object> properties = new LinkedHashMap<String, Object>();
<ide> configSet.add(configs);
<ide>
<ide> properties.put(ConfigGroupResourceProvider
<del> .CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID, "Cluster100");
<add> .CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID, "Cluster100");
<ide> properties.put(ConfigGroupResourceProvider.CONFIGGROUP_NAME_PROPERTY_ID,
<del> "test-1");
<add> "test-1");
<ide> properties.put(ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID,
<del> "tag-1");
<add> "tag-1");
<ide> properties.put(ConfigGroupResourceProvider.CONFIGGROUP_HOSTS_PROPERTY_ID,
<del> hostSet);
<add> hostSet);
<ide> properties.put(ConfigGroupResourceProvider.CONFIGGROUP_CONFIGS_PROPERTY_ID,
<del> configSet);
<add> configSet);
<ide>
<ide> propertySet.add(properties);
<ide>
<ide> provider.createResources(request);
<ide>
<ide> verify(managementController, clusters, cluster, configGroupFactory,
<del> configGroup, response, hostDAO, hostEntity1, hostEntity2);
<add> configGroup, response, hostDAO, hostEntity1, hostEntity2);
<ide>
<ide> assertEquals("version100", captureConfigs.getValue().get("core-site")
<del> .getTag());
<add> .getTag());
<ide> assertTrue(captureHosts.getValue().containsKey(1L));
<ide> assertTrue(captureHosts.getValue().containsKey(2L));
<ide> }
<ide> expect(managementController.getClusters()).andReturn(clusters).anyTimes();
<ide> expect(clusters.getCluster("Cluster100")).andReturn(cluster).anyTimes();
<ide> expect(managementController.getConfigGroupFactory()).andReturn
<del> (configGroupFactory).anyTimes();
<add> (configGroupFactory).anyTimes();
<ide> expect(managementController.getAuthName()).andReturn("admin").anyTimes();
<ide> expect(cluster.getConfigGroups()).andReturn(configGroupMap);
<ide>
<ide> expect(configGroupFactory.createNew((Cluster) anyObject(), (String) anyObject(),
<del> (String) anyObject(), (String) anyObject(), (HashMap) anyObject(),
<del> (HashMap) anyObject())).andReturn(configGroup).anyTimes();
<add> (String) anyObject(), (String) anyObject(), (HashMap) anyObject(),
<add> (HashMap) anyObject())).andReturn(configGroup).anyTimes();
<ide>
<ide> expect(configGroup.getClusterName()).andReturn("Cluster100").anyTimes();
<ide> expect(configGroup.getName()).andReturn("test-1").anyTimes();
<ide> expect(configGroup.getTag()).andReturn("tag-1").anyTimes();
<ide>
<ide> replay(managementController, clusters, cluster, configGroupFactory,
<del> configGroup, response);
<add> configGroup, response);
<ide>
<ide> ResourceProvider provider = getConfigGroupResourceProvider
<del> (managementController);
<add> (managementController);
<ide>
<ide> Map<String, Object> properties = new LinkedHashMap<String, Object>();
<ide> Set<Map<String, Object>> propertySet = new LinkedHashSet<Map<String, Object>>();
<ide>
<ide> properties.put(ConfigGroupResourceProvider
<del> .CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID, "Cluster100");
<add> .CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID, "Cluster100");
<ide> properties.put(ConfigGroupResourceProvider.CONFIGGROUP_NAME_PROPERTY_ID,
<del> "test-1");
<add> "test-1");
<ide> properties.put(ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID,
<del> "tag-1");
<add> "tag-1");
<ide>
<ide> propertySet.add(properties);
<ide> Request request = PropertyHelper.getCreateRequest(propertySet, null);
<ide> Exception exception = null;
<ide> try {
<ide> provider.createResources(request);
<del> } catch (AuthorizationException e){
<add> } catch (AuthorizationException e) {
<ide> throw e;
<ide> } catch (Exception e) {
<ide> exception = e;
<ide> }
<ide>
<ide> verify(managementController, clusters, cluster, configGroupFactory,
<del> configGroup, response);
<add> configGroup, response);
<ide>
<ide> assertNotNull(exception);
<ide> assertTrue(exception instanceof ResourceAlreadyExistsException);
<ide> properties.put(ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID,
<ide> "tag-1");
<ide> properties.put(ConfigGroupResourceProvider.CONFIGGROUP_HOSTS_PROPERTY_ID,
<del> hostSet );
<add> hostSet);
<ide> properties.put(ConfigGroupResourceProvider.CONFIGGROUP_CONFIGS_PROPERTY_ID,
<ide> configSet);
<ide>
<ide> SystemException systemException = null;
<ide> try {
<ide> provider.updateResources(request, predicate);
<del> }
<del> catch (SystemException e){
<add> } catch (SystemException e) {
<ide> systemException = e;
<ide> }
<ide> assertNotNull(systemException);
<ide>
<ide> final ConfigGroup configGroup = createNiceMock(ConfigGroup.class);
<ide> ConfigGroupResponse configGroupResponse = createNiceMock
<del> (ConfigGroupResponse.class);
<add> (ConfigGroupResponse.class);
<ide>
<ide> expect(cluster.isConfigTypeExists("core-site")).andReturn(true).anyTimes();
<ide> expect(managementController.getClusters()).andReturn(clusters).anyTimes();
<ide> });
<ide>
<ide> replay(managementController, clusters, cluster,
<del> configGroup, response, configGroupResponse, configHelper, hostDAO, hostEntity1, hostEntity2, h1, h2);
<add> configGroup, response, configGroupResponse, configHelper, hostDAO, hostEntity1, hostEntity2, h1, h2);
<ide>
<ide> ResourceProvider provider = getConfigGroupResourceProvider
<del> (managementController);
<add> (managementController);
<ide>
<ide> Map<String, Object> properties = new LinkedHashMap<String, Object>();
<ide>
<ide> configSet.add(configs);
<ide>
<ide> properties.put(ConfigGroupResourceProvider
<del> .CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID, "Cluster100");
<add> .CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID, "Cluster100");
<ide> properties.put(ConfigGroupResourceProvider.CONFIGGROUP_NAME_PROPERTY_ID,
<del> "test-1");
<add> "test-1");
<ide> properties.put(ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID,
<del> "tag-1");
<add> "tag-1");
<ide> properties.put(ConfigGroupResourceProvider.CONFIGGROUP_HOSTS_PROPERTY_ID,
<del> hostSet );
<add> hostSet);
<ide> properties.put(ConfigGroupResourceProvider.CONFIGGROUP_CONFIGS_PROPERTY_ID,
<del> configSet);
<add> configSet);
<ide>
<ide> Map<String, String> mapRequestProps = new HashMap<String, String>();
<ide> mapRequestProps.put("context", "Called from a test");
<ide> Request request = PropertyHelper.getUpdateRequest(properties, mapRequestProps);
<ide>
<ide> Predicate predicate = new PredicateBuilder().property
<del> (ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals
<del> ("Cluster100").and().
<del> property(ConfigGroupResourceProvider.CONFIGGROUP_ID_PROPERTY_ID).equals
<del> (25L).toPredicate();
<add> (ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals
<add> ("Cluster100").and().
<add> property(ConfigGroupResourceProvider.CONFIGGROUP_ID_PROPERTY_ID).equals
<add> (25L).toPredicate();
<ide>
<ide> SecurityContextHolder.getContext().setAuthentication(authentication);
<ide>
<ide> provider.updateResources(request, predicate);
<ide>
<ide> verify(managementController, clusters, cluster,
<del> configGroup, response, configGroupResponse, configHelper, hostDAO, hostEntity1, hostEntity2, h1, h2);
<add> configGroup, response, configGroupResponse, configHelper, hostDAO, hostEntity1, hostEntity2, h1, h2);
<ide> }
<ide>
<ide> @Test
<ide> Cluster cluster = createNiceMock(Cluster.class);
<ide> Host h1 = createNiceMock(Host.class);
<ide> final Long host1Id = 1L;
<del> List<Long> hostIds = new ArrayList<Long>() {{ add(host1Id); }};
<del> List<String> hostNames = new ArrayList<String>() {{ add("h1"); }};
<add> List<Long> hostIds = new ArrayList<Long>() {{
<add> add(host1Id);
<add> }};
<add> List<String> hostNames = new ArrayList<String>() {{
<add> add("h1");
<add> }};
<ide> HostEntity hostEntity1 = createMock(HostEntity.class);
<ide>
<ide> expect(hostDAO.getHostNamesByHostIds(hostIds)).andReturn(hostNames).atLeastOnce();
<ide> replay(managementController, clusters, cluster, hostDAO, hostEntity1,
<ide> configGroup1, configGroup2, configGroup3, configGroup4, response1, response2, response3, response4);
<ide>
<del> ResourceProvider resourceProvider = getConfigGroupResourceProvider
<del> (managementController);
<add> SecurityContextHolder.getContext().setAuthentication(authentication);
<add>
<add> ResourceProvider resourceProvider = getConfigGroupResourceProvider(managementController);
<ide>
<ide> Set<String> propertyIds = new HashSet<String>();
<ide>
<ide>
<ide> // Read all
<ide> Predicate predicate = new PredicateBuilder().property
<del> (ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID)
<del> .equals("Cluster100").toPredicate();
<add> (ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID)
<add> .equals("Cluster100").toPredicate();
<ide> Request request = PropertyHelper.getReadRequest(propertyIds);
<ide>
<del> Set<Resource> resources = resourceProvider.getResources(request,
<del> predicate);
<add> Set<Resource> resources = resourceProvider.getResources(request, predicate);
<ide>
<ide> assertEquals(4, resources.size());
<ide>
<ide> // Read by id
<ide> predicate = new PredicateBuilder().property(ConfigGroupResourceProvider
<del> .CONFIGGROUP_ID_PROPERTY_ID).equals(1L).and().property
<del> (ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID)
<del> .equals("Cluster100").toPredicate();
<del>
<del> SecurityContextHolder.getContext().setAuthentication(authentication);
<add> .CONFIGGROUP_ID_PROPERTY_ID).equals(1L).and().property
<add> (ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID)
<add> .equals("Cluster100").toPredicate();
<ide>
<ide> resources = resourceProvider.getResources(request, predicate);
<ide>
<ide> assertEquals(1, resources.size());
<ide> assertEquals(1L, resources.iterator().next().getPropertyValue
<del> (ConfigGroupResourceProvider.CONFIGGROUP_ID_PROPERTY_ID));
<add> (ConfigGroupResourceProvider.CONFIGGROUP_ID_PROPERTY_ID));
<ide>
<ide> // Read by Name
<ide> predicate = new PredicateBuilder().property(ConfigGroupResourceProvider
<del> .CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100").and()
<del> .property(ConfigGroupResourceProvider.CONFIGGROUP_NAME_PROPERTY_ID)
<del> .equals("g2").toPredicate();
<add> .CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100").and()
<add> .property(ConfigGroupResourceProvider.CONFIGGROUP_NAME_PROPERTY_ID)
<add> .equals("g2").toPredicate();
<ide>
<ide> resources = resourceProvider.getResources(request, predicate);
<ide>
<ide> assertEquals(1, resources.size());
<ide> assertEquals("g2", resources.iterator().next().getPropertyValue
<del> (ConfigGroupResourceProvider.CONFIGGROUP_NAME_PROPERTY_ID));
<add> (ConfigGroupResourceProvider.CONFIGGROUP_NAME_PROPERTY_ID));
<ide>
<ide> // Read by tag
<ide> predicate = new PredicateBuilder().property(ConfigGroupResourceProvider
<del> .CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100").and()
<del> .property(ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID)
<del> .equals("t3").toPredicate();
<add> .CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100").and()
<add> .property(ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID)
<add> .equals("t3").toPredicate();
<ide>
<ide> resources = resourceProvider.getResources(request, predicate);
<ide>
<ide> assertEquals(1, resources.size());
<ide> assertEquals("t3", resources.iterator().next().getPropertyValue
<del> (ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID));
<add> (ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID));
<ide>
<ide> // Read by hostname (hosts=h1)
<ide> predicate = new PredicateBuilder().property(ConfigGroupResourceProvider
<del> .CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100").and()
<del> .property(ConfigGroupResourceProvider.CONFIGGROUP_HOSTS_PROPERTY_ID)
<del> .equals("h1").toPredicate();
<add> .CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100").and()
<add> .property(ConfigGroupResourceProvider.CONFIGGROUP_HOSTS_PROPERTY_ID)
<add> .equals("h1").toPredicate();
<ide>
<ide> resources = resourceProvider.getResources(request, predicate);
<ide>
<ide> assertEquals(1, resources.size());
<ide> Set<Map<String, Object>> hostSet = (Set<Map<String, Object>>)
<del> resources.iterator().next()
<del> .getPropertyValue(ConfigGroupResourceProvider
<del> .CONFIGGROUP_HOSTS_PROPERTY_ID);
<add> resources.iterator().next()
<add> .getPropertyValue(ConfigGroupResourceProvider
<add> .CONFIGGROUP_HOSTS_PROPERTY_ID);
<ide> assertEquals("h1", hostSet.iterator().next().get
<del> (ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID));
<add> (ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID));
<ide>
<ide> // Read by hostname (hosts/host_name=h1)
<ide> predicate = new PredicateBuilder().property(ConfigGroupResourceProvider
<del> .CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100").and()
<del> .property(ConfigGroupResourceProvider.CONFIGGROUP_HOSTS_HOSTNAME_PROPERTY_ID)
<del> .equals("h1").toPredicate();
<add> .CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100").and()
<add> .property(ConfigGroupResourceProvider.CONFIGGROUP_HOSTS_HOSTNAME_PROPERTY_ID)
<add> .equals("h1").toPredicate();
<ide>
<ide> resources = resourceProvider.getResources(request, predicate);
<ide>
<ide> assertEquals(1, resources.size());
<ide> hostSet = (Set<Map<String, Object>>)
<del> resources.iterator().next()
<del> .getPropertyValue(ConfigGroupResourceProvider
<del> .CONFIGGROUP_HOSTS_PROPERTY_ID);
<add> resources.iterator().next()
<add> .getPropertyValue(ConfigGroupResourceProvider
<add> .CONFIGGROUP_HOSTS_PROPERTY_ID);
<ide> assertEquals("h1", hostSet.iterator().next().get
<del> (ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID));
<add> (ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID));
<ide>
<ide>
<ide> // Read by tag and hostname (hosts=h1) - Positive
<ide> predicate = new PredicateBuilder().property(ConfigGroupResourceProvider
<del> .CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100").and()
<del> .property(ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID)
<del> .equals("t4").and().property(ConfigGroupResourceProvider
<del> .CONFIGGROUP_HOSTS_PROPERTY_ID).equals(host1Id).toPredicate();
<add> .CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100").and()
<add> .property(ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID)
<add> .equals("t4").and().property(ConfigGroupResourceProvider
<add> .CONFIGGROUP_HOSTS_PROPERTY_ID).equals(host1Id).toPredicate();
<ide>
<ide> resources = resourceProvider.getResources(request, predicate);
<ide>
<ide> assertEquals(1, resources.size());
<ide> hostSet = (Set<Map<String, Object>>)
<del> resources.iterator().next()
<del> .getPropertyValue(ConfigGroupResourceProvider
<del> .CONFIGGROUP_HOSTS_PROPERTY_ID);
<add> resources.iterator().next()
<add> .getPropertyValue(ConfigGroupResourceProvider
<add> .CONFIGGROUP_HOSTS_PROPERTY_ID);
<ide> assertEquals("h1", hostSet.iterator().next().get
<del> (ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID));
<add> (ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID));
<ide>
<ide> // Read by tag and hostname (hosts/host_name=h1) - Positive
<ide> predicate = new PredicateBuilder().property(ConfigGroupResourceProvider
<del> .CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100").and()
<del> .property(ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID)
<del> .equals("t4").and().property(ConfigGroupResourceProvider
<del> .CONFIGGROUP_HOSTS_HOSTNAME_PROPERTY_ID).equals("h1").toPredicate();
<add> .CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID).equals("Cluster100").and()
<add> .property(ConfigGroupResourceProvider.CONFIGGROUP_TAG_PROPERTY_ID)
<add> .equals("t4").and().property(ConfigGroupResourceProvider
<add> .CONFIGGROUP_HOSTS_HOSTNAME_PROPERTY_ID).equals("h1").toPredicate();
<ide>
<ide> resources = resourceProvider.getResources(request, predicate);
<ide>
<ide> assertEquals(1, resources.size());
<ide> hostSet = (Set<Map<String, Object>>)
<del> resources.iterator().next()
<del> .getPropertyValue(ConfigGroupResourceProvider
<del> .CONFIGGROUP_HOSTS_PROPERTY_ID);
<add> resources.iterator().next()
<add> .getPropertyValue(ConfigGroupResourceProvider
<add> .CONFIGGROUP_HOSTS_PROPERTY_ID);
<ide> assertEquals("h1", hostSet.iterator().next().get
<del> (ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID));
<add> (ConfigGroupResourceProvider.CONFIGGROUP_HOSTNAME_PROPERTY_ID));
<ide>
<ide> // Read by id
<ide> predicate = new PredicateBuilder().property(ConfigGroupResourceProvider
<del> .CONFIGGROUP_ID_PROPERTY_ID).equals(11L).and().property
<del> (ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID)
<del> .equals("Cluster100").toPredicate();
<add> .CONFIGGROUP_ID_PROPERTY_ID).equals(11L).and().property
<add> (ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID)
<add> .equals("Cluster100").toPredicate();
<ide>
<ide> NoSuchResourceException resourceException = null;
<ide> try {
<ide> replay(managementController, clusters, cluster, configGroup);
<ide>
<ide> ResourceProvider resourceProvider = getConfigGroupResourceProvider
<del> (managementController);
<add> (managementController);
<ide>
<ide> AbstractResourceProviderTest.TestObserver observer = new AbstractResourceProviderTest.TestObserver();
<ide>
<ide> ((ObservableResourceProvider) resourceProvider).addObserver(observer);
<ide>
<ide> Predicate predicate = new PredicateBuilder().property
<del> (ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID)
<del> .equals("Cluster100").and().property(ConfigGroupResourceProvider
<del> .CONFIGGROUP_ID_PROPERTY_ID).equals(1L).toPredicate();
<add> (ConfigGroupResourceProvider.CONFIGGROUP_CLUSTER_NAME_PROPERTY_ID)
<add> .equals("Cluster100").and().property(ConfigGroupResourceProvider
<add> .CONFIGGROUP_ID_PROPERTY_ID).equals(1L).toPredicate();
<ide>
<ide> SecurityContextHolder.getContext().setAuthentication(authentication);
<ide>
<ide> desiredConfigProperties.add(desiredConfig1);
<ide>
<ide> Map<String, Object> properties = new HashMap<String, Object>();
<del> properties.put("ConfigGroup/hosts", new HashMap<String, String>(){{put("host_name", "ambari1");}});
<add> properties.put("ConfigGroup/hosts", new HashMap<String, String>() {{
<add> put("host_name", "ambari1");
<add> }});
<ide> properties.put("ConfigGroup/cluster_name", "c");
<ide> properties.put("ConfigGroup/desired_configs", desiredConfigProperties);
<ide> |
|
JavaScript | mit | 3bbf243adf17878850a021a4cbdcb29b3e6018cc | 0 | tnt0932/ParkCompass,tnt0932/ParkCompass,tnt0932/ParkCompass | /* scripts.js */
var map;
var xml;
var markers = [];
var userMarker = [];
var infoWindow;
var search_result_list;
var userMarkerPosition = new google.maps.LatLng(49.25, -123.133333);
var parkIcon = 'img/park_icon.png';
var parkIconShadowURL = 'img/park_icon_shadow.png';
var parkIconShadowSize = new google.maps.Size(31, 32);
var parkIconShadowOrigin = new google.maps.Point(0, 0);
var parkIconShadowAnchor = new google.maps.Point(0, 31);
var parkIconShadow = new google.maps.MarkerImage(parkIconShadowURL, parkIconShadowSize, parkIconShadowOrigin, parkIconShadowAnchor);
var userIcon = 'img/user_icon.png';
var userIconShadowURL = 'img/user_icon_shadow.png';
var userIconShadowSize = new google.maps.Size(30, 34);
var userIconShadowOrigin = new google.maps.Point(0, 0);
var userIconShadowAnchor = new google.maps.Point(3, 34);
var userIconShadow = new google.maps.MarkerImage(userIconShadowURL, userIconShadowSize, userIconShadowOrigin, userIconShadowAnchor);
var markerClusterExists = false;
var initialLocation;
var browserSupportFlag = new Boolean();
var clickedFilters = [];
function load(lat, lng) {
userMarkerPosition = new google.maps.LatLng(lat,lng);
map = new google.maps.Map(document.getElementById("map_canvas"), {
center: userMarkerPosition,
zoom: 12,
mapTypeId: 'roadmap',
mapTypeControlOptions: {
style: google.maps.MapTypeControlStyle.DROPDOWN_MENU
}
});
infoWindow = new google.maps.InfoWindow();
search_result_list = document.getElementById('search_results_list');
createUserMarker(map, userMarkerPosition);
searchLocationsNear(userMarkerPosition);
}
function searchLocations() {
var address = document.getElementById("location_search_input").value;
var geocoder = new google.maps.Geocoder();
geocoder.geocode({
address: address
}, function(results, status) {
if (status == google.maps.GeocoderStatus.OK) {
userMarkerPosition = results[0].geometry.location;
searchLocationsNear();
clearUserMarker();
createUserMarker(map, results[0].geometry.location);
map.setCenter(results[0].geometry.location);
} else {
alert(address + ' not found');
}
});
}
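// Requests parks near the current user marker from pc_genxml.php (wide 100km
// radius plus any selected facility filters) and renders the returned XML.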
function searchLocationsNear(args) {
clearLocations();
var radius = 100; //return all results in a 100km radius - basically, return all results
//console.log(userMarkerPosition.lat(), userMarkerPosition.lng(), radius);
var searchUrl = 'pc_genxml.php?lat=' + userMarkerPosition.lat() + '&lng=' + userMarkerPosition.lng() + '&radius=' + radius + '&filters=' + JSON.stringify(clickedFilters);
//console.log(searchUrl);
downloadUrl(searchUrl, function(data) {
var xml = parseXml(data);
getParksData(xml);
});
}
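// Parses the returned park XML: fills the sidebar result list, creates one map
// marker per park, and clusters the markers. Falls back to downtown Vancouver
// when no parks are found in the area.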
function getParksData(xml) {
var bounds = new google.maps.LatLngBounds();
var parkNodes = xml.documentElement.getElementsByTagName("park");
if (parkNodes.length == 0) {
userMarkerPosition = new google.maps.LatLng(49.25, -123.133333);
alert('No Metro Vancouver parks found in that area. We\'re going to move your marker back to the heart of Vancouver!');
clearUserMarker();
createUserMarker(map, userMarkerPosition);
searchLocationsNear();
map.setCenter(userMarkerPosition);
return;
}
for (var i = 0; i < parkNodes.length; i++) {
var facilitiesList = [];
var facility;
var facilities = parkNodes[i].childNodes;
for (var x = 0; x < facilities.length; x++) {
var fType = facilities[x].getAttribute("fType");
var fQuan = facilities[x].getAttribute("fQuan");
facility = [fType, fQuan];
facilitiesList.push(facility);
}
var name = parkNodes[i].getAttribute("pName");
var address = parkNodes[i].getAttribute("pAddress");
var neighbourhood = parkNodes[i].getAttribute("nName");
var url = parkNodes[i].getAttribute("slug");
var latlng = new google.maps.LatLng(
parseFloat(parkNodes[i].getAttribute("pLat")), parseFloat(parkNodes[i].getAttribute("pLng")));
var distance = parseFloat(parkNodes[i].getAttribute("distance"));
createResults(name, distance, i);
createMarker(latlng, name, address, neighbourhood, facilitiesList, url);
bounds.extend(latlng);
}
var mcOptions = {
maxZoom: 14,
minimumClusterSize: 4
};
markerCluster = new MarkerClusterer(map, markers, mcOptions);
markerClusterExists = true;
search_result_list.style.visibility = "visible";
search_result_list.onclick = function(e) {
var markerNum = e.target.parentNode.id;
google.maps.event.trigger(markers[markerNum], 'click');
//console.log(markerNum);
};
}
// RESULTS LIST
function createResults(name, distance, num) {
var results = document.createElement("li");
results.id = num;
results.className = 'search_result';
results.innerHTML = '<h2>' + name + '</h2><h2>' + distance.toFixed(1) + 'km</h2>';
search_result_list.appendChild(results);
}
function showingResultsFor() {
var geocoder = new google.maps.Geocoder();
geocoder.geocode({'latLng': userMarkerPosition}, function(results, status) {
if (status == google.maps.GeocoderStatus.OK) {
if (results[1]) {
document.getElementById('showing_results_for_span').innerHTML = (results[0].formatted_address);
}
} else {
alert("Geocoder failed due to: " + status);
}
});
}
// ===========================================
//
// FILTERS
//
// ===========================================
$(document).ready(function() {
$('#facilities_flyout').click(function(e) {
// don't register event if user clicks on containing div, only directly on facilities
if ($(e.target).attr('id') != 'facilities_flyout') {
if (!$(e.target).hasClass('facility_selected')) {
//console.log(e.target);
$(e.target).addClass('facility_selected');
var id = $(e.target).attr('id').substr(6);
clickedFilters.push(id);
//console.log(clickedFilters);
} else {
$(e.target).removeClass('facility_selected');
var id = $(e.target).attr('id').substr(6);
for (var i = 0; i < clickedFilters.length; i++) {
if (clickedFilters[i] == id) {
clickedFilters.splice(i,1);
}
}
//console.log(clickedFilters);
}
searchLocationsNear();
}
});
$('#remove_all_filters_btn').click(function(e) {
clickedFilters.length = 0;
$('#facilities_flyout a').removeClass('facility_selected');
searchLocationsNear();
});
});
// ===========================================
//
// PARK MARKERS
//
// ===========================================
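// Creates a park marker whose info window shows the park details, facilities,
// a shareable link and a directions link from the user marker position.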
function createMarker(latlng, name, address, neighbourhood, facilitiesList, url) {
var directions = 'http://maps.google.com/maps?saddr='+ userMarkerPosition +'&daddr='+ latlng;
var link = 'http://parkcompass.com/'+url;
var listHtml = '<ul>';
for (var i=0; i < facilitiesList.length; i++) {
listHtml += '<li>'+facilitiesList[i][0]+'<span>'+facilitiesList[i][1]+'</span></li>';
};
listHtml += '</ul>';
var html = '<div class="infowindow"><h2>' + name + "</h2><br/><p>Address: <b>" + address + "</b></p><br/><p>Neighbourhood: <b>" + neighbourhood + "</b></p><br>" + listHtml + "<br><p>Share:<br><input type='text' value='"+link+"' onclick='this.select()' class='parkLink'><a href='" + directions + "' target='_blank'>Directions</a>";
var marker = new google.maps.Marker({
map: map,
position: latlng,
icon: parkIcon,
shadow: parkIconShadow
});
google.maps.event.addListener(marker, 'click', function() {
infoWindow.setContent(html);
infoWindow.open(map, marker);
});
markers.push(marker);
}
function clearLocations() {
infoWindow.close();
for (var i = 0; i < markers.length; i++) {
markers[i].setMap(null);
}
markers.length = 0;
search_result_list.innerHTML = "";
if (markerClusterExists) {
markerCluster.clearMarkers();
markerClusterExists = false;
}
}
// ===========================================
//
// USER MARKER
//
// ===========================================
function createUserMarker(map, center) {
showingResultsFor(center);
var usermarker = new google.maps.Marker({
map: map,
position: center,
icon: userIcon,
shadow: userIconShadow,
draggable: true,
zIndex: 99999
});
google.maps.event.addListener(usermarker, 'dragend', function() {
userMarkerPosition = usermarker.getPosition();
searchLocationsNear();
showingResultsFor();
});
userMarker.push(usermarker);
}
function clearUserMarker() {
for (var i = 0; i < userMarker.length; i++) {
userMarker[i].setMap(null);
}
userMarker.length = 0;
}
// ===========================================
//
// GEOLOCATION
//
// ===========================================
function geolocation() {
// Try W3C Geolocation (Preferred)
if(navigator.geolocation) {
browserSupportFlag = true;
navigator.geolocation.getCurrentPosition(function(position) {
userMarkerPosition = new google.maps.LatLng(position.coords.latitude,position.coords.longitude);
searchLocationsNear();
clearUserMarker();
createUserMarker(map, userMarkerPosition);
map.setCenter(userMarkerPosition);
}, function() {
handleNoGeolocation(browserSupportFlag);
});
}
// Browser doesn't support Geolocation
else {
browserSupportFlag = false;
handleNoGeolocation(browserSupportFlag);
}
function handleNoGeolocation(errorFlag) {
if (errorFlag == true) {
alert("Geolocation service failed. We've placed you in Downtown Vancouver.");
//initialLocation = new google.maps.LatLng(userMarkerPosition);
} else {
alert("Your browser doesn't support geolocation so we've placed you in the heart of Vancouver!");
//initialLocation = new google.maps.LatLng(userMarkerPosition);
}
map.setCenter(userMarkerPosition);
}
}
// ===========================================
//
// XML Functions
//
// ===========================================
function downloadUrl(url, callback) {
var request = window.ActiveXObject ? new ActiveXObject('Microsoft.XMLHTTP') : new XMLHttpRequest;
request.onreadystatechange = function() {
if (request.readyState == 4) {
request.onreadystatechange = doNothing;
callback(request.responseText, request.status);
}
};
request.open('GET', url, true);
request.send(null);
}
function parseXml(str) {
if (window.ActiveXObject) {
var doc = new ActiveXObject('Microsoft.XMLDOM');
doc.loadXML(str);
return doc;
} else if (window.DOMParser) {
return (new DOMParser).parseFromString(str, 'text/xml');
}
}
function doNothing() {}
google.maps.event.addDomListener(window, 'load', function() {
// Define the map options before constructing the map so they actually take effect.
var mapOptions = {
zoom: 11,
center: new google.maps.LatLng(49.3, -123),
mapTypeId: google.maps.MapTypeId.ROADMAP
};
var map = new google.maps.Map(document.getElementById('map_canvas'), mapOptions);
var infoWindow = new google.maps.InfoWindow;
var panelDiv = document.getElementById('sidebar');
var data = new ParksDataSource;
var view = new storeLocator.View(map, data, {
geolocation: false,
features: data.getFeatures()
});
new storeLocator.Panel(panelDiv, {
view: view
});
});
| js/parkcompass-app.1.0.0.js | /* scripts.js */
var map;
var xml;
var markers = [];
var userMarker = [];
var infoWindow;
var search_result_list;
var userMarkerPosition = new google.maps.LatLng(49.25, -123.133333);
var parkIcon = 'img/park_icon.png';
var parkIconShadowURL = 'img/park_icon_shadow.png';
var parkIconShadowSize = new google.maps.Size(31, 32);
var parkIconShadowOrigin = new google.maps.Point(0, 0);
var parkIconShadowAnchor = new google.maps.Point(0, 31);
var parkIconShadow = new google.maps.MarkerImage(parkIconShadowURL, parkIconShadowSize, parkIconShadowOrigin, parkIconShadowAnchor);
var userIcon = 'img/user_icon.png';
var userIconShadowURL = 'img/user_icon_shadow.png';
var userIconShadowSize = new google.maps.Size(30, 34);
var userIconShadowOrigin = new google.maps.Point(0, 0);
var userIconShadowAnchor = new google.maps.Point(3, 34);
var userIconShadow = new google.maps.MarkerImage(userIconShadowURL, userIconShadowSize, userIconShadowOrigin, userIconShadowAnchor);
var markerClusterExists = false;
var initialLocation;
var browserSupportFlag = new Boolean();
var clickedFilters = [];
function load(lat, lng) {
userMarkerPosition = new google.maps.LatLng(lat,lng);
map = new google.maps.Map(document.getElementById("map_canvas"), {
center: userMarkerPosition,
zoom: 12,
mapTypeId: 'roadmap',
mapTypeControlOptions: {
style: google.maps.MapTypeControlStyle.DROPDOWN_MENU
}
});
infoWindow = new google.maps.InfoWindow();
search_result_list = document.getElementById('search_results_list');
createUserMarker(map, userMarkerPosition);
searchLocationsNear(userMarkerPosition);
}
function searchLocations() {
var address = document.getElementById("location_search_input").value;
var geocoder = new google.maps.Geocoder();
geocoder.geocode({
address: address
}, function(results, status) {
if (status == google.maps.GeocoderStatus.OK) {
userMarkerPosition = results[0].geometry.location;
searchLocationsNear();
clearUserMarker();
createUserMarker(map, results[0].geometry.location);
map.setCenter(results[0].geometry.location);
} else {
alert(address + ' not found');
}
});
}
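// Requests parks near the current user marker from pc_genxml.php (wide 100km
// radius plus any selected facility filters) and renders the returned XML.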
function searchLocationsNear(args) {
clearLocations();
var radius = 100; //return all results in a 100km radius - basically, return all results
//console.log(userMarkerPosition.lat(), userMarkerPosition.lng(), radius);
var searchUrl = 'pc_genxml.php?lat=' + userMarkerPosition.lat() + '&lng=' + userMarkerPosition.lng() + '&radius=' + radius + '&filters=' + JSON.stringify(clickedFilters);
//console.log(searchUrl);
downloadUrl(searchUrl, function(data) {
var xml = parseXml(data);
getParksData(xml);
});
}
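// Parses the returned park XML: fills the sidebar result list, creates one map
// marker per park, and clusters the markers. Falls back to downtown Vancouver
// when no parks are found in the area.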
function getParksData(xml) {
var bounds = new google.maps.LatLngBounds();
var parkNodes = xml.documentElement.getElementsByTagName("park");
if (parkNodes.length == 0) {
userMarkerPosition = new google.maps.LatLng(49.25, -123.133333);
alert('No Metro Vancouver parks found in that area. We\'re going to move your marker back to the heart of Vancouver!');
clearUserMarker();
createUserMarker(map, userMarkerPosition);
searchLocationsNear();
map.setCenter(userMarkerPosition);
return;
}
for (var i = 0; i < parkNodes.length; i++) {
var facilitiesList = [];
var facility;
var facilities = parkNodes[i].childNodes;
for (var x = 0; x < facilities.length; x++) {
var fType = facilities[x].getAttribute("fType");
var fQuan = facilities[x].getAttribute("fQuan");
facility = [fType, fQuan];
facilitiesList.push(facility);
}
var name = parkNodes[i].getAttribute("pName");
var address = parkNodes[i].getAttribute("pAddress");
var neighbourhood = parkNodes[i].getAttribute("nName");
var url = parkNodes[i].getAttribute("slug");
var latlng = new google.maps.LatLng(
parseFloat(parkNodes[i].getAttribute("pLat")), parseFloat(parkNodes[i].getAttribute("pLng")));
var distance = parseFloat(parkNodes[i].getAttribute("distance"));
createResults(name, distance, i);
createMarker(latlng, name, address, neighbourhood, facilitiesList, url);
bounds.extend(latlng);
}
var mcOptions = {
maxZoom: 14,
minimumClusterSize: 4
};
markerCluster = new MarkerClusterer(map, markers, mcOptions);
markerClusterExists = true;
search_result_list.style.visibility = "visible";
search_result_list.onclick = function(e) {
var markerNum = e.target.parentNode.id;
google.maps.event.trigger(markers[markerNum], 'click');
//console.log(markerNum);
};
}
// RESULTS LIST
function createResults(name, distance, num) {
var results = document.createElement("li");
results.id = num;
results.className = 'search_result';
results.innerHTML = '<h2>' + name + '</h2><h2>' + distance.toFixed(1) + 'km</h2>';
search_result_list.appendChild(results);
}
function showingResultsFor() {
var geocoder = new google.maps.Geocoder();
geocoder.geocode({'latLng': userMarkerPosition}, function(results, status) {
if (status == google.maps.GeocoderStatus.OK) {
if (results[1]) {
document.getElementById('showing_results_for_span').innerHTML = (results[0].formatted_address);
}
} else {
alert("Geocoder failed due to: " + status);
}
});
}
// ===========================================
//
// FILTERS
//
// ===========================================
$(document).ready(function() {
$('#facilities_flyout').click(function(e) {
// don't register event if user clicks on containing div, only directly on facilities
if ($(e.target).attr('id') != 'facilities_flyout') {
if (!$(e.target).hasClass('facility_selected')) {
//console.log(e.target);
$(e.target).addClass('facility_selected');
var id = $(e.target).attr('id').substr(6);
clickedFilters.push(id);
//console.log(clickedFilters);
} else {
$(e.target).removeClass('facility_selected');
var id = $(e.target).attr('id').substr(6);
for (var i = 0; i < clickedFilters.length; i++) {
if (clickedFilters[i] == id) {
clickedFilters.splice(i,1);
}
}
//console.log(clickedFilters);
}
searchLocationsNear();
}
});
$('#remove_all_filters_btn').click(function(e) {
clickedFilters.length = 0;
$('#facilities_flyout a').removeClass('facility_selected');
searchLocationsNear();
});
});
// ===========================================
//
// PARK MARKERS
//
// ===========================================
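// Creates a park marker whose info window shows the park details, facilities,
// a shareable link and a directions link from the user marker position.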
function createMarker(latlng, name, address, neighbourhood, facilitiesList, url) {
var directions = 'http://maps.google.com/maps?saddr='+ userMarkerPosition +'&daddr='+ latlng;
var link = 'http://parkcompass.com/'+url;
var listHtml = '<ul>';
for (var i=0; i < facilitiesList.length; i++) {
listHtml += '<li>'+facilitiesList[i][0]+'<span>'+facilitiesList[i][1]+'</span></li>';
};
listHtml += '</ul>';
var html = '<div class="infowindow"><h2>' + name + "</h2><br/><p>Address: <b>" + address + "</b></p><br/><p>Neighbourhood: <b>" + neighbourhood + "</b></p><br>" + listHtml + "<br><p>Share:<br><input type='text' value='"+link+"' onclick='this.select()' class='parkLink'><a href='" + directions + "' target='_blank'>Directions</a>";
var marker = new google.maps.Marker({
map: map,
position: latlng,
icon: parkIcon,
shadow: parkIconShadow
});
google.maps.event.addListener(marker, 'click', function() {
infoWindow.setContent(html);
infoWindow.open(map, marker);
});
markers.push(marker);
}
function clearLocations() {
infoWindow.close();
for (var i = 0; i < markers.length; i++) {
markers[i].setMap(null);
}
markers.length = 0;
search_result_list.innerHTML = "";
if (markerClusterExists) {
markerCluster.clearMarkers();
markerClusterExists = false;
}
}
// ===========================================
//
// USER MARKER
//
// ===========================================
function createUserMarker(map, center) {
showingResultsFor(center);
var usermarker = new google.maps.Marker({
map: map,
position: center,
icon: userIcon,
shadow: userIconShadow,
draggable: true,
zIndex: 99999
});
google.maps.event.addListener(usermarker, 'dragend', function() {
userMarkerPosition = usermarker.getPosition();
searchLocationsNear();
showingResultsFor();
});
userMarker.push(usermarker);
}
function clearUserMarker() {
for (var i = 0; i < userMarker.length; i++) {
userMarker[i].setMap(null);
}
userMarker.length = 0;
}
// ===========================================
//
// GEOLOCATION
//
// ===========================================
function geolocation() {
// Try W3C Geolocation (Preferred)
if(navigator.geolocation) {
browserSupportFlag = true;
navigator.geolocation.getCurrentPosition(function(position) {
userMarkerPosition = new google.maps.LatLng(position.coords.latitude,position.coords.longitude);
searchLocationsNear();
clearUserMarker();
createUserMarker(map, userMarkerPosition);
map.setCenter(userMarkerPosition);
}, function() {
handleNoGeolocation(browserSupportFlag);
});
}
// Browser doesn't support Geolocation
else {
browserSupportFlag = false;
handleNoGeolocation(browserSupportFlag);
}
function handleNoGeolocation(errorFlag) {
if (errorFlag == true) {
alert("Geolocation service failed. We've placed you in Downtown Vancouver.");
//initialLocation = new google.maps.LatLng(userMarkerPosition);
} else {
alert("Your browser doesn't support geolocation so we've placed you in the heart of Vancouver!");
//initialLocation = new google.maps.LatLng(userMarkerPosition);
}
map.setCenter(userMarkerPosition);
}
}
// ===========================================
//
// XML Functions
//
// ===========================================
function downloadUrl(url, callback) {
var request = window.ActiveXObject ? new ActiveXObject('Microsoft.XMLHTTP') : new XMLHttpRequest;
request.onreadystatechange = function() {
if (request.readyState == 4) {
request.onreadystatechange = doNothing;
callback(request.responseText, request.status);
}
};
request.open('GET', url, true);
request.send(null);
}
function parseXml(str) {
if (window.ActiveXObject) {
var doc = new ActiveXObject('Microsoft.XMLDOM');
doc.loadXML(str);
return doc;
} else if (window.DOMParser) {
return (new DOMParser).parseFromString(str, 'text/xml');
}
}
function doNothing() {}
google.maps.event.addDomListener(window, 'load', function() {
// Define the map options before constructing the map so they actually take effect.
var mapOptions = {
zoom: 11,
center: new google.maps.LatLng(49.3, -123),
mapTypeId: google.maps.MapTypeId.ROADMAP
};
var map = new google.maps.Map(document.getElementById('map_canvas'), mapOptions);
var infoWindow = new google.maps.InfoWindow;
var panelDiv = document.getElementById('sidebar');
var data = new ParksDataSource;
var view = new storeLocator.View(map, data, {
geolocation: false,
features: data.getFeatures()
});
new storeLocator.Panel(panelDiv, {
view: view
});
});
| arbitrary change to test cloud9
| js/parkcompass-app.1.0.0.js | arbitrary change to test cloud9 | <ide><path>js/parkcompass-app.1.0.0.js
<ide> searchLocationsNear(userMarkerPosition);
<ide>
<ide> }
<del>
<ide>
<ide> function searchLocations() {
<ide> var address = document.getElementById("location_search_input").value; |
|
Java | bsd-3-clause | 96e0d2f06d1861d6c9570f2d5c16270c188b5eb6 | 0 | flutter/flutter-intellij,flutter/flutter-intellij,flutter/flutter-intellij,flutter/flutter-intellij,flutter/flutter-intellij | /*
* Copyright 2017 The Chromium Authors. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
package io.flutter.view;
import com.intellij.execution.runners.ExecutionUtil;
import com.intellij.execution.ui.ConsoleViewContentType;
import com.intellij.execution.ui.layout.impl.JBRunnerTabs;
import com.intellij.icons.AllIcons;
import com.intellij.ide.BrowserUtil;
import com.intellij.ide.browsers.BrowserLauncher;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.actionSystem.ex.CustomComponentAction;
import com.intellij.openapi.actionSystem.impl.ActionButton;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.PersistentStateComponent;
import com.intellij.openapi.components.Storage;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.SimpleToolWindowPanel;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.ToolWindow;
import com.intellij.openapi.wm.ToolWindowManager;
import com.intellij.openapi.wm.ex.ToolWindowEx;
import com.intellij.openapi.wm.ex.ToolWindowManagerEx;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.ui.content.*;
import com.intellij.ui.tabs.TabInfo;
import icons.FlutterIcons;
import io.flutter.FlutterBundle;
import io.flutter.FlutterInitializer;
import io.flutter.inspector.InspectorService;
import io.flutter.run.daemon.FlutterApp;
import io.flutter.run.daemon.FlutterDevice;
import io.flutter.settings.FlutterSettings;
import io.flutter.utils.VmServiceListenerAdapter;
import org.dartlang.vm.service.VmService;
import org.dartlang.vm.service.element.Event;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.*;
// TODO(devoncarew): Display an fps graph.
@com.intellij.openapi.components.State(
name = "FlutterView",
storages = {@Storage("$WORKSPACE_FILE$")}
)
public class FlutterView implements PersistentStateComponent<FlutterViewState>, Disposable {
private static class PerAppState {
ArrayList<FlutterViewAction> flutterViewActions = new ArrayList<>();
ArrayList<InspectorPanel> inspectorPanels = new ArrayList<>();
Content content;
boolean sendRestartNotificationOnNextFrame = false;
}
public static final String TOOL_WINDOW_ID = "Flutter Inspector";
public static final String WIDGET_TREE_LABEL = "Widgets";
public static final String RENDER_TREE_LABEL = "Render Tree";
@NotNull
private final FlutterViewState state = new FlutterViewState();
@NotNull
private final Project myProject;
private String restoreToolWindowId;
private final Map<FlutterApp, PerAppState> perAppViewState = new HashMap<>();
public FlutterView(@NotNull Project project) {
myProject = project;
}
@Override
public void dispose() {
}
@NotNull
@Override
public FlutterViewState getState() {
return this.state;
}
@Override
public void loadState(FlutterViewState state) {
this.state.copyFrom(state);
}
public void initToolWindow(ToolWindow window) {
// Add a feedback button.
if (window instanceof ToolWindowEx) {
final AnAction sendFeedbackAction = new AnAction("Send Feedback", "Send Feedback", FlutterIcons.Feedback) {
@Override
public void actionPerformed(AnActionEvent event) {
BrowserUtil.browse("https://goo.gl/WrMB43");
}
};
((ToolWindowEx)window).setTitleActions(sendFeedbackAction);
}
// TODO(jacobr): add a message explaining the empty contents if the user
// manually opens the window when there is not yet a running app.
}
private DefaultActionGroup createToolbar(@NotNull ToolWindow toolWindow, @NotNull FlutterApp app) {
final DefaultActionGroup toolbarGroup = new DefaultActionGroup();
toolbarGroup.add(registerAction(new ToggleInspectModeAction(app)));
toolbarGroup.addSeparator();
toolbarGroup.add(registerAction(new DebugDrawAction(app)));
toolbarGroup.add(registerAction(new TogglePlatformAction(app)));
toolbarGroup.add(registerAction(new PerformanceOverlayAction(app)));
toolbarGroup.addSeparator();
toolbarGroup.add(registerAction(new OpenTimelineViewAction(app)));
toolbarGroup.add(registerAction(new OpenObservatoryAction(app)));
toolbarGroup.addSeparator();
toolbarGroup.add(new OverflowActionsAction(this, app));
return toolbarGroup;
}
FlutterViewAction registerAction(FlutterViewAction action) {
getOrCreateStateForApp(action.app).flutterViewActions.add(action);
return action;
}
private PerAppState getStateForApp(FlutterApp app) {
return perAppViewState.get(app);
}
private PerAppState getOrCreateStateForApp(FlutterApp app) {
PerAppState state = perAppViewState.computeIfAbsent(app, k -> new PerAppState());
return state;
}
private void addInspector(FlutterApp app, ToolWindow toolWindow) {
final ContentManager contentManager = toolWindow.getContentManager();
final ContentFactory contentFactory = ContentFactory.SERVICE.getInstance();
final SimpleToolWindowPanel toolWindowPanel = new SimpleToolWindowPanel(true);
final JBRunnerTabs tabs = new JBRunnerTabs(myProject, ActionManager.getInstance(), null, this);
final List<FlutterDevice> existingDevices = new ArrayList<>();
for (FlutterApp otherApp : perAppViewState.keySet()) {
existingDevices.add(otherApp.device());
}
final Content content = contentFactory.createContent(null, app.device().getUniqueName(existingDevices), false);
content.setComponent(tabs.getComponent());
content.putUserData(ToolWindow.SHOW_CONTENT_ICON, Boolean.TRUE);
content.setIcon(FlutterIcons.Phone);
contentManager.addContent(content);
final PerAppState state = getOrCreateStateForApp(app);
assert (state.content == null);
state.content = content;
final DefaultActionGroup toolbarGroup = createToolbar(toolWindow, app);
toolWindowPanel.setToolbar(ActionManager.getInstance().createActionToolbar("FlutterViewToolbar", toolbarGroup, true).getComponent());
addInspectorPanel("Widgets", tabs, state, InspectorService.FlutterTreeType.widget, app, toolWindow, toolbarGroup, true);
addInspectorPanel("Render Tree", tabs, state, InspectorService.FlutterTreeType.renderObject, app, toolWindow, toolbarGroup, false);
}
private void addInspectorPanel(String displayName,
JBRunnerTabs tabs,
PerAppState state,
InspectorService.FlutterTreeType treeType,
FlutterApp flutterApp,
@NotNull ToolWindow toolWindow,
DefaultActionGroup toolbarGroup,
boolean selectedTab) {
final InspectorPanel inspectorPanel = new InspectorPanel(this, flutterApp, flutterApp::isSessionActive, treeType);
final TabInfo tabInfo = new TabInfo(inspectorPanel).setActions(toolbarGroup, ActionPlaces.TOOLBAR)
.append(displayName, SimpleTextAttributes.REGULAR_ATTRIBUTES);
tabs.addTab(tabInfo);
state.inspectorPanels.add(inspectorPanel);
if (selectedTab) {
tabs.select(tabInfo, false);
}
}
/**
* Called when a debug connection starts.
*/
public void debugActive(@NotNull FlutterViewMessages.FlutterDebugEvent event) {
if (FlutterSettings.getInstance().isOpenInspectorOnAppLaunch()) {
autoActivateToolWindow();
}
final FlutterApp app = event.app;
final ToolWindowManager toolWindowManager = ToolWindowManager.getInstance(myProject);
if (!(toolWindowManager instanceof ToolWindowManagerEx)) {
return;
}
final ToolWindow toolWindow = toolWindowManager.getToolWindow(FlutterView.TOOL_WINDOW_ID);
if (toolWindow == null) {
return;
}
listenForRenderTreeActivations(toolWindow);
addInspector(app, toolWindow);
event.vmService.addVmServiceListener(new VmServiceListenerAdapter() {
@Override
public void connectionOpened() {
onAppChanged(app);
}
@Override
public void received(String streamId, Event event) {
// Note: we depend here on the streamListen("Extension") call in InspectorService.
if (StringUtil.equals(streamId, VmService.EXTENSION_STREAM_ID)) {
if (StringUtil.equals("Flutter.Frame", event.getExtensionKind())) {
handleFlutterFrame(app);
}
}
}
@Override
public void connectionClosed() {
ApplicationManager.getApplication().invokeLater(() -> {
final ContentManager contentManager = toolWindow.getContentManager();
onAppChanged(app);
final PerAppState state = perAppViewState.remove(app);
if (state != null && state.content != null) {
contentManager.removeContent(state.content, true);
}
if (perAppViewState.isEmpty()) {
// No more applications are running.
restorePreviousToolWindow();
}
});
}
});
onAppChanged(app);
app.addStateListener(new FlutterApp.FlutterAppListener() {
public void notifyAppRestarted() {
        // When a restart finishes, queue up a notification to the flutter view
// actions. We don't notify right away because the new isolate can take a little
// while to start up. We wait until we get the first frame event, which is
// enough of an indication that the isolate and flutter framework are initialized
// enough to receive service calls (for example, calls to restore various framework
// debugging settings).
final PerAppState state = getStateForApp(app);
if (state != null) {
state.sendRestartNotificationOnNextFrame = true;
}
}
});
}
private static void listenForRenderTreeActivations(@NotNull ToolWindow toolWindow) {
final ContentManager contentManager = toolWindow.getContentManager();
contentManager.addContentManagerListener(new ContentManagerAdapter() {
@Override
public void selectionChanged(ContentManagerEvent event) {
final ContentManagerEvent.ContentOperation operation = event.getOperation();
if (operation == ContentManagerEvent.ContentOperation.add) {
final String name = event.getContent().getTabName();
if (Objects.equals(name, RENDER_TREE_LABEL)) {
FlutterInitializer.getAnalytics().sendEvent("inspector", "renderTreeSelected");
}
else if (Objects.equals(name, WIDGET_TREE_LABEL)) {
FlutterInitializer.getAnalytics().sendEvent("inspector", "widgetTreeSelected");
}
}
}
});
}
private void handleFlutterFrame(FlutterApp app) {
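    // Invoked for each Flutter.Frame event; the first frame after a restart fires the queued restart notification to the view actions.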
final PerAppState state = getStateForApp(app);
if (state != null && state.sendRestartNotificationOnNextFrame) {
state.sendRestartNotificationOnNextFrame = false;
notifyActionsOnRestart(app);
}
}
private void notifyActionsAppStarted(FlutterApp app) {
final PerAppState state = getStateForApp(app);
if (state == null) {
return;
}
for (FlutterViewAction action : state.flutterViewActions) {
action.handleAppStarted();
}
}
private void notifyActionsOnRestart(FlutterApp app) {
final PerAppState state = getStateForApp(app);
if (state == null) {
return;
}
for (FlutterViewAction action : state.flutterViewActions) {
action.handleAppRestarted();
}
}
private void notifyActionsAppStopped(FlutterApp app) {
final PerAppState state = getStateForApp(app);
if (state == null) {
return;
}
state.sendRestartNotificationOnNextFrame = false;
}
private void onAppChanged(FlutterApp app) {
if (myProject.isDisposed()) {
return;
}
final ToolWindow toolWindow = ToolWindowManager.getInstance(myProject).getToolWindow(TOOL_WINDOW_ID);
if (toolWindow == null) {
return;
}
if (perAppViewState.isEmpty()) {
toolWindow.setIcon(FlutterIcons.Flutter_13);
notifyActionsAppStopped(app);
}
else {
toolWindow.setIcon(ExecutionUtil.getLiveIndicator(FlutterIcons.Flutter_13));
notifyActionsAppStarted(app);
}
final PerAppState state = getStateForApp(app);
if (state != null) {
for (InspectorPanel inspectorPanel : state.inspectorPanels) {
inspectorPanel.onAppChanged();
}
}
}
/**
* Activate the tool window; on app termination, restore any previously active tool window.
*/
private void autoActivateToolWindow() {
final ToolWindowManager toolWindowManager = ToolWindowManager.getInstance(myProject);
if (!(toolWindowManager instanceof ToolWindowManagerEx)) {
return;
}
restoreToolWindowId = null;
final ToolWindow flutterToolWindow = toolWindowManager.getToolWindow(FlutterView.TOOL_WINDOW_ID);
if (flutterToolWindow.isVisible()) {
return;
}
final ToolWindowManagerEx toolWindowManagerEx = (ToolWindowManagerEx)toolWindowManager;
for (String id : toolWindowManagerEx.getIdsOn(flutterToolWindow.getAnchor())) {
final ToolWindow toolWindow = toolWindowManagerEx.getToolWindow(id);
if (toolWindow.isVisible()) {
restoreToolWindowId = id;
}
}
flutterToolWindow.show(null);
}
private void restorePreviousToolWindow() {
if (restoreToolWindowId == null) {
return;
}
ApplicationManager.getApplication().invokeLater(() -> {
if (myProject.isDisposed()) {
return;
}
final ToolWindowManager toolWindowManager = ToolWindowManager.getInstance(myProject);
final ToolWindow flutterToolWindow = toolWindowManager.getToolWindow(FlutterView.TOOL_WINDOW_ID);
// Show this view iff the flutter view is the one still visible.
if (flutterToolWindow.isVisible()) {
final ToolWindow toolWindow = toolWindowManager.getToolWindow(restoreToolWindowId);
toolWindow.show(null);
}
restoreToolWindowId = null;
});
}
}
class DebugDrawAction extends FlutterViewToggleableAction {
DebugDrawAction(@NotNull FlutterApp app) {
super(app, FlutterBundle.message("flutter.view.debugPaint.text"), FlutterBundle.message("flutter.view.debugPaint.description"),
AllIcons.General.TbShown);
}
protected void perform(AnActionEvent event) {
if (app.isSessionActive()) {
app.callBooleanExtension("ext.flutter.debugPaint", isSelected());
}
}
public void handleAppStarted() {
handleAppRestarted();
}
public void handleAppRestarted() {
if (isSelected()) {
perform(null);
}
}
}
class PerformanceOverlayAction extends FlutterViewToggleableAction {
PerformanceOverlayAction(@NotNull FlutterApp app) {
super(app, "Toggle Performance Overlay", "Toggle Performance Overlay", AllIcons.Modules.Library);
}
protected void perform(@Nullable AnActionEvent event) {
if (app.isSessionActive()) {
app.callBooleanExtension("ext.flutter.showPerformanceOverlay", isSelected());
}
}
public void handleAppStarted() {
handleAppRestarted();
}
public void handleAppRestarted() {
if (isSelected()) {
perform(null);
}
}
}
class OpenObservatoryAction extends FlutterViewAction {
OpenObservatoryAction(@NotNull FlutterApp app) {
super(app, FlutterBundle.message("open.observatory.action.text"), FlutterBundle.message("open.observatory.action.description"),
FlutterIcons.OpenObservatory);
}
@Override
public void perform(AnActionEvent event) {
if (app.isSessionActive()) {
final String url = app.getConnector().getBrowserUrl();
if (url != null) {
BrowserLauncher.getInstance().browse(url, null);
}
}
}
}
class OpenTimelineViewAction extends FlutterViewAction {
OpenTimelineViewAction(@NotNull FlutterApp app) {
super(app, "Open Timeline View", "Open Timeline View", FlutterIcons.OpenTimeline);
}
@Override
public void perform(AnActionEvent event) {
if (app.isSessionActive()) {
final String url = app.getConnector().getBrowserUrl();
if (url != null) {
BrowserLauncher.getInstance().browse(url + "/#/timeline-dashboard", null);
}
}
}
}
class TogglePlatformAction extends FlutterViewAction {
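  // Switches the running app between Android and iOS platform behavior via FlutterApp.togglePlatform(), re-applying the last choice after a restart.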
private Boolean isCurrentlyAndroid;
TogglePlatformAction(@NotNull FlutterApp app) {
super(app, FlutterBundle.message("flutter.view.togglePlatform.text"),
FlutterBundle.message("flutter.view.togglePlatform.description"),
AllIcons.RunConfigurations.Application);
}
@Override
public void perform(AnActionEvent event) {
if (app.isSessionActive()) {
app.togglePlatform().thenAccept(isAndroid -> {
if (isAndroid == null) {
return;
}
app.togglePlatform(!isAndroid).thenAccept(isNowAndroid -> {
if (app.getConsole() != null && isNowAndroid != null) {
isCurrentlyAndroid = isNowAndroid;
app.getConsole().print(
FlutterBundle.message("flutter.view.togglePlatform.output",
isNowAndroid ? "Android" : "iOS"),
ConsoleViewContentType.SYSTEM_OUTPUT);
}
});
});
}
}
public void handleAppRestarted() {
if (isCurrentlyAndroid != null) {
app.togglePlatform(isCurrentlyAndroid);
}
}
}
class RepaintRainbowAction extends FlutterViewToggleableAction {
RepaintRainbowAction(@NotNull FlutterApp app) {
super(app, "Enable Repaint Rainbow");
}
protected void perform(@Nullable AnActionEvent event) {
if (app.isSessionActive()) {
app.callBooleanExtension("ext.flutter.repaintRainbow", isSelected());
}
}
public void handleAppStarted() {
handleAppRestarted();
}
public void handleAppRestarted() {
if (isSelected()) {
perform(null);
}
}
}
class TimeDilationAction extends FlutterViewToggleableAction {
TimeDilationAction(@NotNull FlutterApp app) {
super(app, "Enable Slow Animations");
}
protected void perform(@Nullable AnActionEvent event) {
final Map<String, Object> params = new HashMap<>();
params.put("timeDilation", isSelected() ? 5.0 : 1.0);
if (app.isSessionActive()) {
app.callServiceExtension("ext.flutter.timeDilation", params);
}
}
public void handleAppRestarted() {
if (isSelected()) {
perform(null);
}
}
}
class ToggleInspectModeAction extends FlutterViewToggleableAction {
ToggleInspectModeAction(@NotNull FlutterApp app) {
super(app, "Toggle Select Widget Mode", "Toggle Select Widget Mode", AllIcons.General.LocateHover);
}
protected void perform(AnActionEvent event) {
if (app.isSessionActive()) {
app.callBooleanExtension("ext.flutter.debugWidgetInspector", isSelected());
// If toggling inspect mode on, bring all devices to the foreground.
// TODO(jacobr): consider only bringing the device for the currently open inspector TAB.
if (isSelected()) {
final FlutterDevice device = app.device();
if (device != null) {
device.bringToFront();
}
}
}
}
public void handleAppRestarted() {
if (isSelected()) {
setSelected(null, false);
}
}
}
class HideSlowBannerAction extends FlutterViewToggleableAction {
HideSlowBannerAction(@NotNull FlutterApp app) {
super(app, "Hide Slow Mode Banner");
}
@Override
protected void perform(@Nullable AnActionEvent event) {
if (app.isSessionActive()) {
app.callBooleanExtension("ext.flutter.debugAllowBanner", !isSelected());
}
}
public void handleAppStarted() {
handleAppRestarted();
}
public void handleAppRestarted() {
if (isSelected()) {
perform(null);
}
}
}
class ShowPaintBaselinesAction extends FlutterViewToggleableAction {
ShowPaintBaselinesAction(@NotNull FlutterApp app) {
super(app, "Show Paint Baselines");
}
@Override
protected void perform(@Nullable AnActionEvent event) {
if (app.isSessionActive()) {
app.callBooleanExtension("ext.flutter.debugPaintBaselinesEnabled", isSelected());
}
}
public void handleAppStarted() {
handleAppRestarted();
}
public void handleAppRestarted() {
if (isSelected()) {
perform(null);
}
}
}
class OverflowActionsAction extends AnAction implements CustomComponentAction {
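  // Gear-button action that opens a popup with the less frequently used debug toggles (paint baselines, repaint rainbow, slow animations, hide banner).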
private final @NotNull FlutterApp app;
private final DefaultActionGroup myActionGroup;
public OverflowActionsAction(@NotNull FlutterView view, @NotNull FlutterApp app) {
super("Additional actions", null, AllIcons.General.Gear);
this.app = app;
myActionGroup = createPopupActionGroup(view, app);
}
@Override
public final void update(AnActionEvent e) {
e.getPresentation().setEnabled(app.isSessionActive());
}
@Override
public void actionPerformed(AnActionEvent e) {
final Presentation presentation = e.getPresentation();
final JComponent button = (JComponent)presentation.getClientProperty("button");
if (button == null) {
return;
}
final ActionPopupMenu popupMenu = ActionManager.getInstance().createActionPopupMenu(
ActionPlaces.UNKNOWN,
myActionGroup);
popupMenu.getComponent().show(button, button.getWidth(), 0);
}
@Override
public JComponent createCustomComponent(Presentation presentation) {
final ActionButton button = new ActionButton(
this,
presentation,
ActionPlaces.UNKNOWN,
ActionToolbar.DEFAULT_MINIMUM_BUTTON_SIZE
);
presentation.putClientProperty("button", button);
return button;
}
private static DefaultActionGroup createPopupActionGroup(FlutterView view, FlutterApp app) {
final DefaultActionGroup group = new DefaultActionGroup();
group.add(view.registerAction(new ShowPaintBaselinesAction(app)));
group.addSeparator();
group.add(view.registerAction(new RepaintRainbowAction(app)));
group.add(view.registerAction(new TimeDilationAction(app)));
group.addSeparator();
group.add(view.registerAction(new HideSlowBannerAction(app)));
return group;
}
}
| src/io/flutter/view/FlutterView.java | /*
* Copyright 2017 The Chromium Authors. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
package io.flutter.view;
import com.intellij.execution.runners.ExecutionUtil;
import com.intellij.execution.ui.ConsoleViewContentType;
import com.intellij.execution.ui.layout.impl.JBRunnerTabs;
import com.intellij.icons.AllIcons;
import com.intellij.ide.BrowserUtil;
import com.intellij.ide.browsers.BrowserLauncher;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.actionSystem.ex.CustomComponentAction;
import com.intellij.openapi.actionSystem.impl.ActionButton;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.PersistentStateComponent;
import com.intellij.openapi.components.Storage;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.SimpleToolWindowPanel;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.ToolWindow;
import com.intellij.openapi.wm.ToolWindowManager;
import com.intellij.openapi.wm.ex.ToolWindowEx;
import com.intellij.openapi.wm.ex.ToolWindowManagerEx;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.ui.content.*;
import com.intellij.ui.tabs.TabInfo;
import icons.FlutterIcons;
import io.flutter.FlutterBundle;
import io.flutter.FlutterInitializer;
import io.flutter.inspector.InspectorService;
import io.flutter.run.daemon.FlutterApp;
import io.flutter.run.daemon.FlutterDevice;
import io.flutter.settings.FlutterSettings;
import io.flutter.utils.VmServiceListenerAdapter;
import org.dartlang.vm.service.VmService;
import org.dartlang.vm.service.element.Event;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.*;
// TODO(devoncarew): Display an fps graph.
@com.intellij.openapi.components.State(
name = "FlutterView",
storages = {@Storage("$WORKSPACE_FILE$")}
)
public class FlutterView implements PersistentStateComponent<FlutterViewState>, Disposable {
private static class PerAppState {
ArrayList<FlutterViewAction> flutterViewActions = new ArrayList<>();
ArrayList<InspectorPanel> inspectorPanels = new ArrayList<>();
Content content;
boolean sendRestartNotificationOnNextFrame = false;
}
public static final String TOOL_WINDOW_ID = "Flutter Inspector";
public static final String WIDGET_TREE_LABEL = "Widgets";
public static final String RENDER_TREE_LABEL = "Render Tree";
@NotNull
private final FlutterViewState state = new FlutterViewState();
@NotNull
private final Project myProject;
private String restoreToolWindowId;
private final Map<FlutterApp, PerAppState> perAppViewState = new HashMap<>();
public FlutterView(@NotNull Project project) {
myProject = project;
}
@Override
public void dispose() {
}
@NotNull
@Override
public FlutterViewState getState() {
return this.state;
}
@Override
public void loadState(FlutterViewState state) {
this.state.copyFrom(state);
}
public void initToolWindow(ToolWindow window) {
// Add a feedback button.
if (window instanceof ToolWindowEx) {
final AnAction sendFeedbackAction = new AnAction("Send Feedback", "Send Feedback", FlutterIcons.Feedback) {
@Override
public void actionPerformed(AnActionEvent event) {
BrowserUtil.browse("https://goo.gl/WrMB43");
}
};
((ToolWindowEx)window).setTitleActions(sendFeedbackAction);
}
// TODO(jacobr): add a message explaining the empty contents if the user
// manually opens the window when there is not yet a running app.
}
private DefaultActionGroup createToolbar(@NotNull ToolWindow toolWindow, @NotNull FlutterApp app) {
final DefaultActionGroup toolbarGroup = new DefaultActionGroup();
toolbarGroup.add(registerAction(new ToggleInspectModeAction(app)));
toolbarGroup.addSeparator();
toolbarGroup.add(registerAction(new DebugDrawAction(app)));
toolbarGroup.add(registerAction(new TogglePlatformAction(app)));
toolbarGroup.add(registerAction(new PerformanceOverlayAction(app)));
toolbarGroup.addSeparator();
toolbarGroup.add(registerAction(new OpenTimelineViewAction(app)));
toolbarGroup.add(registerAction(new OpenObservatoryAction(app)));
toolbarGroup.addSeparator();
toolbarGroup.add(new OverflowActionsAction(this, app));
return toolbarGroup;
}
FlutterViewAction registerAction(FlutterViewAction action) {
getOrCreateStateForApp(action.app).flutterViewActions.add(action);
return action;
}
private PerAppState getStateForApp(FlutterApp app) {
return perAppViewState.get(app);
}
private PerAppState getOrCreateStateForApp(FlutterApp app) {
PerAppState state = perAppViewState.computeIfAbsent(app, k -> new PerAppState());
return state;
}
private void addInspector(FlutterApp app, ToolWindow toolWindow) {
final ContentManager contentManager = toolWindow.getContentManager();
final ContentFactory contentFactory = ContentFactory.SERVICE.getInstance();
final SimpleToolWindowPanel toolWindowPanel = new SimpleToolWindowPanel(true);
final JBRunnerTabs tabs = new JBRunnerTabs(myProject, ActionManager.getInstance(), null, this);
final List<FlutterDevice> existingDevices = new ArrayList<>();
for (FlutterApp otherApp : perAppViewState.keySet()) {
existingDevices.add(otherApp.device());
}
final Content content = contentFactory.createContent(null, app.device().getUniqueName(existingDevices), false);
content.setComponent(tabs.getComponent());
content.putUserData(ToolWindow.SHOW_CONTENT_ICON, Boolean.TRUE);
content.setIcon(FlutterIcons.Phone);
contentManager.addContent(content);
final PerAppState state = getOrCreateStateForApp(app);
assert (state.content == null);
state.content = content;
final DefaultActionGroup toolbarGroup = createToolbar(toolWindow, app);
toolWindowPanel.setToolbar(ActionManager.getInstance().createActionToolbar("FlutterViewToolbar", toolbarGroup, true).getComponent());
addInspectorPanel("Widgets", tabs, state, InspectorService.FlutterTreeType.widget, app, toolWindow, toolbarGroup, true);
addInspectorPanel("Render Tree", tabs, state, InspectorService.FlutterTreeType.renderObject, app, toolWindow, toolbarGroup, false);
}
private void addInspectorPanel(String displayName,
JBRunnerTabs tabs,
PerAppState state,
InspectorService.FlutterTreeType treeType,
FlutterApp flutterApp,
@NotNull ToolWindow toolWindow,
DefaultActionGroup toolbarGroup,
boolean selectedTab) {
final InspectorPanel inspectorPanel = new InspectorPanel(this, flutterApp, flutterApp::isSessionActive, treeType);
final TabInfo tabInfo = new TabInfo(inspectorPanel).setActions(toolbarGroup, ActionPlaces.TOOLBAR)
.append(displayName, SimpleTextAttributes.REGULAR_ATTRIBUTES);
tabs.addTab(tabInfo);
state.inspectorPanels.add(inspectorPanel);
if (selectedTab) {
tabs.select(tabInfo, false);
}
}
/**
* Called when a debug connection starts.
*/
public void debugActive(@NotNull FlutterViewMessages.FlutterDebugEvent event) {
if (FlutterSettings.getInstance().isOpenInspectorOnAppLaunch()) {
autoActivateToolWindow();
}
final FlutterApp app = event.app;
final ToolWindowManager toolWindowManager = ToolWindowManager.getInstance(myProject);
if (!(toolWindowManager instanceof ToolWindowManagerEx)) {
return;
}
final ToolWindow toolWindow = toolWindowManager.getToolWindow(FlutterView.TOOL_WINDOW_ID);
if (toolWindow == null) {
return;
}
listenForRenderTreeActivations(toolWindow);
addInspector(app, toolWindow);
event.vmService.addVmServiceListener(new VmServiceListenerAdapter() {
@Override
public void connectionOpened() {
onAppChanged(app);
}
@Override
public void received(String streamId, Event event) {
// Note: we depend here on the streamListen("Extension") call in InspectorService.
if (StringUtil.equals(streamId, VmService.EXTENSION_STREAM_ID)) {
if (StringUtil.equals("Flutter.Frame", event.getExtensionKind())) {
handleFlutterFrame(app);
}
}
}
@Override
public void connectionClosed() {
ApplicationManager.getApplication().invokeLater(() -> {
final ContentManager contentManager = toolWindow.getContentManager();
onAppChanged(app);
final PerAppState state = perAppViewState.remove(app);
if (state != null && state.content != null) {
contentManager.removeContent(state.content, true);
}
if (perAppViewState.isEmpty()) {
// No more applications are running.
restorePreviousToolWindow();
}
});
}
});
onAppChanged(app);
app.addStateListener(new FlutterApp.FlutterAppListener() {
public void notifyAppRestarted() {
// When we get a restart finishes, queue up a notification to the flutter view
// actions. We don't notify right away because the new isolate can take a little
// while to start up. We wait until we get the first frame event, which is
// enough of an indication that the isolate and flutter framework are initialized
// enough to receive service calls (for example, calls to restore various framework
// debugging settings).
final PerAppState state = getStateForApp(app);
if (state != null) {
state.sendRestartNotificationOnNextFrame = true;
}
}
});
}
private static void listenForRenderTreeActivations(@NotNull ToolWindow toolWindow) {
final ContentManager contentManager = toolWindow.getContentManager();
contentManager.addContentManagerListener(new ContentManagerAdapter() {
@Override
public void selectionChanged(ContentManagerEvent event) {
final ContentManagerEvent.ContentOperation operation = event.getOperation();
if (operation == ContentManagerEvent.ContentOperation.add) {
final String name = event.getContent().getTabName();
if (Objects.equals(name, RENDER_TREE_LABEL)) {
FlutterInitializer.getAnalytics().sendEvent("inspector", "renderTreeSelected");
}
else if (Objects.equals(name, WIDGET_TREE_LABEL)) {
FlutterInitializer.getAnalytics().sendEvent("inspector", "widgetTreeSelected");
}
}
}
});
}
private void handleFlutterFrame(FlutterApp app) {
final PerAppState state = getStateForApp(app);
if (state != null && state.sendRestartNotificationOnNextFrame) {
state.sendRestartNotificationOnNextFrame = false;
notifyActionsOnRestart(app);
}
}
private void notifyActionsAppStarted(FlutterApp app) {
final PerAppState state = getStateForApp(app);
if (state == null) {
return;
}
for (FlutterViewAction action : state.flutterViewActions) {
action.handleAppStarted();
}
}
private void notifyActionsOnRestart(FlutterApp app) {
final PerAppState state = getStateForApp(app);
if (state == null) {
return;
}
for (FlutterViewAction action : state.flutterViewActions) {
action.handleAppRestarted();
}
}
private void notifyActionsAppStopped(FlutterApp app) {
final PerAppState state = getStateForApp(app);
if (state == null) {
return;
}
state.sendRestartNotificationOnNextFrame = false;
}
private void onAppChanged(FlutterApp app) {
if (myProject.isDisposed()) {
return;
}
final ToolWindow toolWindow = ToolWindowManager.getInstance(myProject).getToolWindow(TOOL_WINDOW_ID);
if (toolWindow == null) {
return;
}
if (perAppViewState.isEmpty()) {
toolWindow.setIcon(FlutterIcons.Flutter_13);
notifyActionsAppStopped(app);
}
else {
toolWindow.setIcon(ExecutionUtil.getLiveIndicator(FlutterIcons.Flutter_13));
notifyActionsAppStarted(app);
}
final PerAppState state = getStateForApp(app);
if (state != null) {
for (InspectorPanel inspectorPanel : state.inspectorPanels) {
inspectorPanel.onAppChanged();
}
}
}
/**
* Activate the tool window; on app termination, restore any previously active tool window.
*/
private void autoActivateToolWindow() {
final ToolWindowManager toolWindowManager = ToolWindowManager.getInstance(myProject);
if (!(toolWindowManager instanceof ToolWindowManagerEx)) {
return;
}
restoreToolWindowId = null;
final ToolWindow flutterToolWindow = toolWindowManager.getToolWindow(FlutterView.TOOL_WINDOW_ID);
if (flutterToolWindow.isVisible()) {
return;
}
final ToolWindowManagerEx toolWindowManagerEx = (ToolWindowManagerEx)toolWindowManager;
for (String id : toolWindowManagerEx.getIdsOn(flutterToolWindow.getAnchor())) {
final ToolWindow toolWindow = toolWindowManagerEx.getToolWindow(id);
if (toolWindow.isVisible()) {
restoreToolWindowId = id;
}
}
flutterToolWindow.show(null);
}
private void restorePreviousToolWindow() {
if (restoreToolWindowId == null) {
return;
}
ApplicationManager.getApplication().invokeLater(() -> {
final ToolWindowManager toolWindowManager = ToolWindowManager.getInstance(myProject);
final ToolWindow flutterToolWindow = toolWindowManager.getToolWindow(FlutterView.TOOL_WINDOW_ID);
// Show this view iff the flutter view is the one still visible.
if (flutterToolWindow.isVisible()) {
final ToolWindow toolWindow = toolWindowManager.getToolWindow(restoreToolWindowId);
toolWindow.show(null);
}
restoreToolWindowId = null;
});
}
}
class DebugDrawAction extends FlutterViewToggleableAction {
DebugDrawAction(@NotNull FlutterApp app) {
super(app, FlutterBundle.message("flutter.view.debugPaint.text"), FlutterBundle.message("flutter.view.debugPaint.description"),
AllIcons.General.TbShown);
}
protected void perform(AnActionEvent event) {
if (app.isSessionActive()) {
app.callBooleanExtension("ext.flutter.debugPaint", isSelected());
}
}
public void handleAppStarted() {
handleAppRestarted();
}
public void handleAppRestarted() {
if (isSelected()) {
perform(null);
}
}
}
class PerformanceOverlayAction extends FlutterViewToggleableAction {
PerformanceOverlayAction(@NotNull FlutterApp app) {
super(app, "Toggle Performance Overlay", "Toggle Performance Overlay", AllIcons.Modules.Library);
}
protected void perform(@Nullable AnActionEvent event) {
if (app.isSessionActive()) {
app.callBooleanExtension("ext.flutter.showPerformanceOverlay", isSelected());
}
}
public void handleAppStarted() {
handleAppRestarted();
}
public void handleAppRestarted() {
if (isSelected()) {
perform(null);
}
}
}
class OpenObservatoryAction extends FlutterViewAction {
OpenObservatoryAction(@NotNull FlutterApp app) {
super(app, FlutterBundle.message("open.observatory.action.text"), FlutterBundle.message("open.observatory.action.description"),
FlutterIcons.OpenObservatory);
}
@Override
public void perform(AnActionEvent event) {
if (app.isSessionActive()) {
final String url = app.getConnector().getBrowserUrl();
if (url != null) {
BrowserLauncher.getInstance().browse(url, null);
}
}
}
}
class OpenTimelineViewAction extends FlutterViewAction {
OpenTimelineViewAction(@NotNull FlutterApp app) {
super(app, "Open Timeline View", "Open Timeline View", FlutterIcons.OpenTimeline);
}
@Override
public void perform(AnActionEvent event) {
if (app.isSessionActive()) {
final String url = app.getConnector().getBrowserUrl();
if (url != null) {
BrowserLauncher.getInstance().browse(url + "/#/timeline-dashboard", null);
}
}
}
}
class TogglePlatformAction extends FlutterViewAction {
private Boolean isCurrentlyAndroid;
TogglePlatformAction(@NotNull FlutterApp app) {
super(app, FlutterBundle.message("flutter.view.togglePlatform.text"),
FlutterBundle.message("flutter.view.togglePlatform.description"),
AllIcons.RunConfigurations.Application);
}
@Override
public void perform(AnActionEvent event) {
if (app.isSessionActive()) {
app.togglePlatform().thenAccept(isAndroid -> {
if (isAndroid == null) {
return;
}
app.togglePlatform(!isAndroid).thenAccept(isNowAndroid -> {
if (app.getConsole() != null && isNowAndroid != null) {
isCurrentlyAndroid = isNowAndroid;
app.getConsole().print(
FlutterBundle.message("flutter.view.togglePlatform.output",
isNowAndroid ? "Android" : "iOS"),
ConsoleViewContentType.SYSTEM_OUTPUT);
}
});
});
}
}
public void handleAppRestarted() {
if (isCurrentlyAndroid != null) {
app.togglePlatform(isCurrentlyAndroid);
}
}
}
class RepaintRainbowAction extends FlutterViewToggleableAction {
RepaintRainbowAction(@NotNull FlutterApp app) {
super(app, "Enable Repaint Rainbow");
}
protected void perform(@Nullable AnActionEvent event) {
if (app.isSessionActive()) {
app.callBooleanExtension("ext.flutter.repaintRainbow", isSelected());
}
}
public void handleAppStarted() {
handleAppRestarted();
}
public void handleAppRestarted() {
if (isSelected()) {
perform(null);
}
}
}
class TimeDilationAction extends FlutterViewToggleableAction {
TimeDilationAction(@NotNull FlutterApp app) {
super(app, "Enable Slow Animations");
}
protected void perform(@Nullable AnActionEvent event) {
final Map<String, Object> params = new HashMap<>();
params.put("timeDilation", isSelected() ? 5.0 : 1.0);
if (app.isSessionActive()) {
app.callServiceExtension("ext.flutter.timeDilation", params);
}
}
public void handleAppRestarted() {
if (isSelected()) {
perform(null);
}
}
}
class ToggleInspectModeAction extends FlutterViewToggleableAction {
ToggleInspectModeAction(@NotNull FlutterApp app) {
super(app, "Toggle Select Widget Mode", "Toggle Select Widget Mode", AllIcons.General.LocateHover);
}
protected void perform(AnActionEvent event) {
if (app.isSessionActive()) {
app.callBooleanExtension("ext.flutter.debugWidgetInspector", isSelected());
// If toggling inspect mode on, bring all devices to the foreground.
// TODO(jacobr): consider only bringing the device for the currently open inspector TAB.
if (isSelected()) {
final FlutterDevice device = app.device();
if (device != null) {
device.bringToFront();
}
}
}
}
public void handleAppRestarted() {
if (isSelected()) {
setSelected(null, false);
}
}
}
class HideSlowBannerAction extends FlutterViewToggleableAction {
HideSlowBannerAction(@NotNull FlutterApp app) {
super(app, "Hide Slow Mode Banner");
}
@Override
protected void perform(@Nullable AnActionEvent event) {
if (app.isSessionActive()) {
app.callBooleanExtension("ext.flutter.debugAllowBanner", !isSelected());
}
}
public void handleAppStarted() {
handleAppRestarted();
}
public void handleAppRestarted() {
if (isSelected()) {
perform(null);
}
}
}
class ShowPaintBaselinesAction extends FlutterViewToggleableAction {
ShowPaintBaselinesAction(@NotNull FlutterApp app) {
super(app, "Show Paint Baselines");
}
@Override
protected void perform(@Nullable AnActionEvent event) {
if (app.isSessionActive()) {
app.callBooleanExtension("ext.flutter.debugPaintBaselinesEnabled", isSelected());
}
}
public void handleAppStarted() {
handleAppRestarted();
}
public void handleAppRestarted() {
if (isSelected()) {
perform(null);
}
}
}
class OverflowActionsAction extends AnAction implements CustomComponentAction {
private final @NotNull FlutterApp app;
private final DefaultActionGroup myActionGroup;
public OverflowActionsAction(@NotNull FlutterView view, @NotNull FlutterApp app) {
super("Additional actions", null, AllIcons.General.Gear);
this.app = app;
myActionGroup = createPopupActionGroup(view, app);
}
@Override
public final void update(AnActionEvent e) {
e.getPresentation().setEnabled(app.isSessionActive());
}
@Override
public void actionPerformed(AnActionEvent e) {
final Presentation presentation = e.getPresentation();
final JComponent button = (JComponent)presentation.getClientProperty("button");
if (button == null) {
return;
}
final ActionPopupMenu popupMenu = ActionManager.getInstance().createActionPopupMenu(
ActionPlaces.UNKNOWN,
myActionGroup);
popupMenu.getComponent().show(button, button.getWidth(), 0);
}
@Override
public JComponent createCustomComponent(Presentation presentation) {
final ActionButton button = new ActionButton(
this,
presentation,
ActionPlaces.UNKNOWN,
ActionToolbar.DEFAULT_MINIMUM_BUTTON_SIZE
);
presentation.putClientProperty("button", button);
return button;
}
private static DefaultActionGroup createPopupActionGroup(FlutterView view, FlutterApp app) {
final DefaultActionGroup group = new DefaultActionGroup();
group.add(view.registerAction(new ShowPaintBaselinesAction(app)));
group.addSeparator();
group.add(view.registerAction(new RepaintRainbowAction(app)));
group.add(view.registerAction(new TimeDilationAction(app)));
group.addSeparator();
group.add(view.registerAction(new HideSlowBannerAction(app)));
return group;
}
}
| fix a project use after dispose (#1878)
| src/io/flutter/view/FlutterView.java | fix a project use after dispose (#1878) | <ide><path>rc/io/flutter/view/FlutterView.java
<ide> }
<ide>
<ide> ApplicationManager.getApplication().invokeLater(() -> {
<add> if (myProject.isDisposed()) {
<add> return;
<add> }
<add>
<ide> final ToolWindowManager toolWindowManager = ToolWindowManager.getInstance(myProject);
<ide> final ToolWindow flutterToolWindow = toolWindowManager.getToolWindow(FlutterView.TOOL_WINDOW_ID);
<ide> |
|
Java | apache-2.0 | ca169d68e0ec399a12e9869a55cbe66809606c0d | 0 | dbflute/dbflute-intro,dbflute/dbflute-intro,dbflute/dbflute-intro,dbflute/dbflute-intro,dbflute/dbflute-intro,dbflute/dbflute-intro,dbflute/dbflute-intro | /*
* Copyright 2014-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.dbflute.intro.app.web.welcome;
import org.apache.commons.lang3.StringUtils;
import org.dbflute.intro.app.logic.client.ClientInfoLogic;
import org.dbflute.intro.app.logic.client.ClientUpdateLogic;
import org.dbflute.intro.app.logic.core.PublicPropertiesLogic;
import org.dbflute.intro.app.logic.database.DatabaseInfoLogic;
import org.dbflute.intro.app.logic.dfprop.TestConnectionLogic;
import org.dbflute.intro.app.logic.engine.EngineInstallLogic;
import org.dbflute.intro.app.model.client.ClientModel;
import org.dbflute.intro.app.model.client.ExtlibFile;
import org.dbflute.intro.app.model.client.ProjectInfra;
import org.dbflute.intro.app.model.client.basic.BasicInfoMap;
import org.dbflute.intro.app.model.client.database.DatabaseInfoMap;
import org.dbflute.intro.app.model.client.database.DbConnectionBox;
import org.dbflute.intro.app.model.client.database.various.AdditionalSchemaMap;
import org.dbflute.intro.app.web.base.IntroBaseAction;
import org.dbflute.intro.app.web.welcome.WelcomeCreateBody.ClientPart;
import org.dbflute.intro.dbflute.allcommon.CDef.TargetDatabase;
import org.dbflute.optional.OptionalThing;
import org.lastaflute.web.Execute;
import org.lastaflute.web.response.JsonResponse;
import javax.annotation.Resource;
import java.util.LinkedHashMap;
import java.util.Objects;
import java.util.Optional;
/**
* @author hakiba
*/
public class WelcomeAction extends IntroBaseAction {
// ===================================================================================
// Attribute
// =========
@Resource
private ClientInfoLogic clientInfoLogic;
@Resource
private ClientUpdateLogic clientUpdateLogic;
@Resource
private TestConnectionLogic testConnectionLogic;
@Resource
private EngineInstallLogic engineInstallLogic;
@Resource
private PublicPropertiesLogic publicPropertiesLogic;
@Resource
private DatabaseInfoLogic databaseInfoLogic;
// ===================================================================================
// Execute
// =======
@Execute
public JsonResponse<Void> create(WelcomeCreateBody welcomeCreateBody) {
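        // Validates the new client settings, installs the latest DBFlute engine, optionally test-connects, and then creates the DBFlute client.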
validate(welcomeCreateBody, messages -> {
ClientPart client = welcomeCreateBody.client;
String projectName = client.projectName;
if (clientInfoLogic.getProjectList().contains(projectName)) {
messages.addErrorsWelcomeClientAlreadyExists("projectName", projectName); // TODO: hakiba refactor type-safe (2016/10/10)
}
// TODO hakiba JDBC Driver's required check depending on database type by jflute (2017/04/13)
// done hakiba needs to check jar existence by jflute (2017/04/06)
TargetDatabase databaseCd = client.databaseCode;
if (!databaseInfoLogic.isEmbeddedJar(databaseCd) && Objects.isNull(client.jdbcDriver.data)) {
messages.addErrorsDatabaseNeedsJar("database", databaseCd.alias());
}
// done hakiba add extension check by jflute (2017/04/06)
Optional.ofNullable(client.jdbcDriver)
.map(driverPart -> driverPart.fileName)
.filter(s -> StringUtils.isNotEmpty(s) && !s.endsWith(".jar"))
.ifPresent(fileName -> messages.addErrorsDatabaseNeedsJar("jdbcDriver", fileName));
});
String latestVersion = publicPropertiesLogic.findProperties().getDBFluteLatestReleaseVersion();
engineInstallLogic.downloadUnzipping(latestVersion);
ClientModel clientModel = mappingToClientModel(welcomeCreateBody.client);
if (welcomeCreateBody.testConnection) {
testConnectionIfPossible(clientModel);
}
clientUpdateLogic.createClient(clientModel);
return JsonResponse.asEmptyBody();
}
// ===================================================================================
// Mapping
// =======
private ClientModel mappingToClientModel(ClientPart clientBody) {
return newClientModel(clientBody);
}
private ClientModel newClientModel(ClientPart clientBody) {
ProjectInfra projectInfra = prepareProjectMeta(clientBody);
BasicInfoMap basicInfoMap = prepareBasicInfoMap(clientBody);
DatabaseInfoMap databaseInfoMap = prepareDatabaseInfoMap(clientBody);
return new ClientModel(projectInfra, basicInfoMap, databaseInfoMap);
}
private ProjectInfra prepareProjectMeta(ClientPart clientBody) {
if (Objects.isNull(clientBody.jdbcDriver)) {
return new ProjectInfra(clientBody.projectName, clientBody.dbfluteVersion);
}
return new ProjectInfra(clientBody.projectName, clientBody.dbfluteVersion, clientBody.jdbcDriver.fileName, clientBody.jdbcDriver.data);
}
private BasicInfoMap prepareBasicInfoMap(ClientPart clientBody) {
return new BasicInfoMap(clientBody.databaseCode, clientBody.languageCode, clientBody.containerCode, clientBody.packageBase);
}
private DatabaseInfoMap prepareDatabaseInfoMap(ClientPart clientBody) {
return OptionalThing.ofNullable(clientBody.mainSchemaSettings, () -> {}).map(databaseBody -> {
DbConnectionBox connectionBox =
new DbConnectionBox(databaseBody.url, databaseBody.schema, databaseBody.user, databaseBody.password);
AdditionalSchemaMap additionalSchemaMap = new AdditionalSchemaMap(new LinkedHashMap<>()); // #pending see the class code
return new DatabaseInfoMap(clientBody.jdbcDriverFqcn, connectionBox, additionalSchemaMap);
}).orElseThrow(() -> {
return new IllegalStateException("Not found the database body: " + clientBody);
});
}
private void testConnectionIfPossible(ClientModel clientModel) {
String dbfluteVersion = clientModel.getProjectInfra().getDbfluteVersion();
OptionalThing<String> jdbcDriverJarPath = clientModel.getProjectInfra().getJdbcDriverExtlibFile().map(ExtlibFile::getCanonicalPath);
DatabaseInfoMap databaseInfoMap = clientModel.getDatabaseInfoMap();
testConnectionLogic.testConnection(dbfluteVersion, jdbcDriverJarPath, databaseInfoMap);
}
}
| src/main/java/org/dbflute/intro/app/web/welcome/WelcomeAction.java | /*
* Copyright 2014-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.dbflute.intro.app.web.welcome;
import org.apache.commons.lang3.StringUtils;
import org.dbflute.intro.app.logic.client.ClientInfoLogic;
import org.dbflute.intro.app.logic.client.ClientUpdateLogic;
import org.dbflute.intro.app.logic.core.PublicPropertiesLogic;
import org.dbflute.intro.app.logic.database.DatabaseInfoLogic;
import org.dbflute.intro.app.logic.dfprop.TestConnectionLogic;
import org.dbflute.intro.app.logic.engine.EngineInstallLogic;
import org.dbflute.intro.app.model.client.ClientModel;
import org.dbflute.intro.app.model.client.ExtlibFile;
import org.dbflute.intro.app.model.client.ProjectInfra;
import org.dbflute.intro.app.model.client.basic.BasicInfoMap;
import org.dbflute.intro.app.model.client.database.DatabaseInfoMap;
import org.dbflute.intro.app.model.client.database.DbConnectionBox;
import org.dbflute.intro.app.model.client.database.various.AdditionalSchemaMap;
import org.dbflute.intro.app.web.base.IntroBaseAction;
import org.dbflute.intro.app.web.welcome.WelcomeCreateBody.ClientPart;
import org.dbflute.intro.dbflute.allcommon.CDef.TargetDatabase;
import org.dbflute.optional.OptionalThing;
import org.lastaflute.web.Execute;
import org.lastaflute.web.response.JsonResponse;
import javax.annotation.Resource;
import java.util.LinkedHashMap;
import java.util.Objects;
import java.util.Optional;
/**
* @author hakiba
*/
public class WelcomeAction extends IntroBaseAction {
// ===================================================================================
// Attribute
// =========
@Resource
private ClientInfoLogic clientInfoLogic;
@Resource
private ClientUpdateLogic clientUpdateLogic;
@Resource
private TestConnectionLogic testConnectionLogic;
@Resource
private EngineInstallLogic engineInstallLogic;
@Resource
private PublicPropertiesLogic publicPropertiesLogic;
@Resource
private DatabaseInfoLogic databaseInfoLogic;
// ===================================================================================
// Execute
// =======
@Execute
public JsonResponse<Void> create(WelcomeCreateBody welcomeCreateBody) {
validate(welcomeCreateBody, messages -> {
ClientPart client = welcomeCreateBody.client;
String projectName = client.projectName;
if (clientInfoLogic.getProjectList().contains(projectName)) {
messages.addErrorsWelcomeClientAlreadyExists("projectName", projectName); // TODO: hakiba refactor type-safe (2016/10/10)
}
// TODO hakiba needs to check jar existence by jflute (2017/04/06)
TargetDatabase databaseCd = client.databaseCode;
if (!databaseInfoLogic.isEmbeddedJar(databaseCd) && Objects.isNull(client.jdbcDriver.data)) {
messages.addErrorsDatabaseNeedsJar("database", databaseCd.alias());
}
// TODO hakiba add extension check by jflute (2017/04/06)
Optional.ofNullable(client.jdbcDriver)
.map(driverPart -> driverPart.fileName)
.filter(s -> StringUtils.isNotEmpty(s) && !s.endsWith(".jar"))
.ifPresent(fileName -> messages.addErrorsDatabaseNeedsJar("jdbcDriver", fileName));
});
String latestVersion = publicPropertiesLogic.findProperties().getDBFluteLatestReleaseVersion();
engineInstallLogic.downloadUnzipping(latestVersion);
ClientModel clientModel = mappingToClientModel(welcomeCreateBody.client);
if (welcomeCreateBody.testConnection) {
testConnectionIfPossible(clientModel);
}
clientUpdateLogic.createClient(clientModel);
return JsonResponse.asEmptyBody();
}
// ===================================================================================
// Mapping
// =======
private ClientModel mappingToClientModel(ClientPart clientBody) {
return newClientModel(clientBody);
}
private ClientModel newClientModel(ClientPart clientBody) {
ProjectInfra projectInfra = prepareProjectMeta(clientBody);
BasicInfoMap basicInfoMap = prepareBasicInfoMap(clientBody);
DatabaseInfoMap databaseInfoMap = prepareDatabaseInfoMap(clientBody);
return new ClientModel(projectInfra, basicInfoMap, databaseInfoMap);
}
private ProjectInfra prepareProjectMeta(ClientPart clientBody) {
if (Objects.isNull(clientBody.jdbcDriver)) {
return new ProjectInfra(clientBody.projectName, clientBody.dbfluteVersion);
}
return new ProjectInfra(clientBody.projectName, clientBody.dbfluteVersion, clientBody.jdbcDriver.fileName, clientBody.jdbcDriver.data);
}
private BasicInfoMap prepareBasicInfoMap(ClientPart clientBody) {
return new BasicInfoMap(clientBody.databaseCode, clientBody.languageCode, clientBody.containerCode, clientBody.packageBase);
}
private DatabaseInfoMap prepareDatabaseInfoMap(ClientPart clientBody) {
return OptionalThing.ofNullable(clientBody.mainSchemaSettings, () -> {}).map(databaseBody -> {
DbConnectionBox connectionBox =
new DbConnectionBox(databaseBody.url, databaseBody.schema, databaseBody.user, databaseBody.password);
AdditionalSchemaMap additionalSchemaMap = new AdditionalSchemaMap(new LinkedHashMap<>()); // #pending see the class code
return new DatabaseInfoMap(clientBody.jdbcDriverFqcn, connectionBox, additionalSchemaMap);
}).orElseThrow(() -> {
return new IllegalStateException("Not found the database body: " + clientBody);
});
}
private void testConnectionIfPossible(ClientModel clientModel) {
String dbfluteVersion = clientModel.getProjectInfra().getDbfluteVersion();
OptionalThing<String> jdbcDriverJarPath = clientModel.getProjectInfra().getJdbcDriverExtlibFile().map(ExtlibFile::getCanonicalPath);
DatabaseInfoMap databaseInfoMap = clientModel.getDatabaseInfoMap();
testConnectionLogic.testConnection(dbfluteVersion, jdbcDriverJarPath, databaseInfoMap);
}
}
| add TODO comment
| src/main/java/org/dbflute/intro/app/web/welcome/WelcomeAction.java | add TODO comment | <ide><path>rc/main/java/org/dbflute/intro/app/web/welcome/WelcomeAction.java
<ide> if (clientInfoLogic.getProjectList().contains(projectName)) {
<ide> messages.addErrorsWelcomeClientAlreadyExists("projectName", projectName); // TODO: hakiba refactor type-safe (2016/10/10)
<ide> }
<del> // TODO hakiba needs to check jar existence by jflute (2017/04/06)
<add> // TODO hakiba JDBC Driver's required check depending on database type by jflute (2017/04/13)
<add> // done hakiba needs to check jar existence by jflute (2017/04/06)
<ide> TargetDatabase databaseCd = client.databaseCode;
<ide> if (!databaseInfoLogic.isEmbeddedJar(databaseCd) && Objects.isNull(client.jdbcDriver.data)) {
<ide> messages.addErrorsDatabaseNeedsJar("database", databaseCd.alias());
<ide> }
<del> // TODO hakiba add extension check by jflute (2017/04/06)
<add> // done hakiba add extension check by jflute (2017/04/06)
<ide> Optional.ofNullable(client.jdbcDriver)
<ide> .map(driverPart -> driverPart.fileName)
<ide> .filter(s -> StringUtils.isNotEmpty(s) && !s.endsWith(".jar")) |
|
Java | apache-2.0 | 7291486ca0ccbff67f444ff1ce3c5429c1378d9c | 0 | sanderbaas/k-9,thuanpq/k-9,dgger/k-9,cketti/k-9,philipwhiuk/k-9,torte71/k-9,tsunli/k-9,thuanpq/k-9,dhootha/k-9,gaionim/k-9,mawiegand/k-9,denim2x/k-9,sanderbaas/k-9,ndew623/k-9,sonork/k-9,roscrazy/k-9,indus1/k-9,XiveZ/k-9,vasyl-khomko/k-9,k9mail/k-9,tonytamsf/k-9,deepworks/k-9,Eagles2F/k-9,rishabhbitsg/k-9,439teamwork/k-9,WenduanMou1/k-9,philipwhiuk/q-mail,rollbrettler/k-9,WenduanMou1/k-9,github201407/k-9,bashrc/k-9,github201407/k-9,gilbertw1/k-9,msdgwzhy6/k-9,Eagles2F/k-9,KitAway/k-9,vt0r/k-9,konfer/k-9,icedman21/k-9,vasyl-khomko/k-9,nilsbraden/k-9,ndew623/k-9,herpiko/k-9,imaeses/k-9,farmboy0/k-9,dgger/k-9,cketti/k-9,ndew623/k-9,farmboy0/k-9,WenduanMou1/k-9,gaionim/k-9,deepworks/k-9,gaionim/k-9,leixinstar/k-9,KitAway/k-9,sedrubal/k-9,dgger/k-9,GuillaumeSmaha/k-9,gnebsy/k-9,sebkur/k-9,dpereira411/k-9,cooperpellaton/k-9,dpereira411/k-9,roscrazy/k-9,sebkur/k-9,Eagles2F/k-9,msdgwzhy6/k-9,farmboy0/k-9,github201407/k-9,vt0r/k-9,rishabhbitsg/k-9,gilbertw1/k-9,jca02266/k-9,herpiko/k-9,bashrc/k-9,vatsalsura/k-9,torte71/k-9,sonork/k-9,G00fY2/k-9_material_design,k9mail/k-9,jca02266/k-9,jberkel/k-9,denim2x/k-9,deepworks/k-9,XiveZ/k-9,cooperpellaton/k-9,crr0004/k-9,moparisthebest/k-9,sonork/k-9,dpereira411/k-9,moparisthebest/k-9,suzp1984/k-9,rtreffer/openpgp-k-9,tsunli/k-9,rtreffer/openpgp-k-9,cooperpellaton/k-9,vatsalsura/k-9,vasyl-khomko/k-9,sebkur/k-9,CodingRmy/k-9,leixinstar/k-9,philipwhiuk/q-mail,mawiegand/k-9,bashrc/k-9,gnebsy/k-9,G00fY2/k-9_material_design,cliniome/pki,leixinstar/k-9,huhu/k-9,CodingRmy/k-9,GuillaumeSmaha/k-9,konfer/k-9,indus1/k-9,imaeses/k-9,Valodim/k-9,KitAway/k-9,suzp1984/k-9,jca02266/k-9,439teamwork/k-9,suzp1984/k-9,tonytamsf/k-9,cketti/k-9,tsunli/k-9,jberkel/k-9,huhu/k-9,sanderbaas/k-9,philipwhiuk/q-mail,mawiegand/k-9,crr0004/k-9,nilsbraden/k-9,dhootha/k-9,herpiko/k-9,crr0004/k-9,sedrubal/k-9,tonytamsf/k-9,huhu/k-9,gnebsy/k-9,gilbertw1/k-9,rollbrettler/k-9,GuillaumeSmaha/k-9,439teamwork/k-9,k9mail/k-9,cliniome/pki,denim2x/k-9,nilsbraden/k-9,rollbrettler/k-9,icedman21/k-9,thuanpq/k-9,imaeses/k-9,dhootha/k-9,icedman21/k-9,moparisthebest/k-9,konfer/k-9,torte71/k-9,msdgwzhy6/k-9,cketti/k-9,cliniome/pki,philipwhiuk/k-9,XiveZ/k-9 |
package com.fsck.k9.controller;
import java.io.CharArrayWriter;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.PriorityBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import android.app.Application;
import android.app.KeyguardManager;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.media.AudioManager;
import android.net.Uri;
import android.os.PowerManager;
import android.os.Process;
import android.text.TextUtils;
import android.util.Log;
import com.fsck.k9.Account;
import com.fsck.k9.AccountStats;
import com.fsck.k9.K9;
import com.fsck.k9.NotificationSetting;
import com.fsck.k9.Preferences;
import com.fsck.k9.R;
import com.fsck.k9.SearchSpecification;
import com.fsck.k9.activity.FolderList;
import com.fsck.k9.activity.MessageList;
import com.fsck.k9.helper.Utility;
import com.fsck.k9.helper.power.TracingPowerManager;
import com.fsck.k9.helper.power.TracingPowerManager.TracingWakeLock;
import com.fsck.k9.mail.Address;
import com.fsck.k9.mail.FetchProfile;
import com.fsck.k9.mail.Flag;
import com.fsck.k9.mail.Folder;
import com.fsck.k9.mail.Folder.FolderType;
import com.fsck.k9.mail.Folder.OpenMode;
import com.fsck.k9.mail.Message;
import com.fsck.k9.mail.MessagingException;
import com.fsck.k9.mail.Part;
import com.fsck.k9.mail.PushReceiver;
import com.fsck.k9.mail.Pusher;
import com.fsck.k9.mail.Store;
import com.fsck.k9.mail.Transport;
import com.fsck.k9.mail.internet.MimeMessage;
import com.fsck.k9.mail.internet.MimeUtility;
import com.fsck.k9.mail.internet.TextBody;
import com.fsck.k9.mail.store.UnavailableAccountException;
import com.fsck.k9.mail.store.LocalStore;
import com.fsck.k9.mail.store.UnavailableStorageException;
import com.fsck.k9.mail.store.LocalStore.LocalFolder;
import com.fsck.k9.mail.store.LocalStore.LocalMessage;
import com.fsck.k9.mail.store.LocalStore.PendingCommand;
/**
* Starts a long-running (application) Thread that runs through commands
* that require remote mailbox access. This class is used to serialize and
* prioritize these commands. Each method that submits a command requires a
* MessagingListener instance to be provided. That listener is expected to
* also have been added as a registered listener using addListener(). When a
* command is to be executed, if the listener that was provided with the command
* is no longer registered, the command is skipped. The design idea behind this
* is that an Activity registers as a listener when it starts and removes
* itself when it is paused. Thus, any commands that activity submitted are
* removed from the queue once the activity is no longer active.
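* <p>
* A minimal usage sketch from an Activity (illustrative only; the lifecycle
* methods shown are simply the natural place to do this):
* <pre>
* MessagingController controller = MessagingController.getInstance(getApplication());
* controller.addListener(listener); // e.g. in onResume()
* // ... submit commands ...
* controller.removeListener(listener); // e.g. in onPause()
* </pre>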
*/
public class MessagingController implements Runnable
{
/**
* Immutable empty {@link String} array
*/
private static final String[] EMPTY_STRING_ARRAY = new String[0];
/**
* Immutable empty {@link Message} array
*/
private static final Message[] EMPTY_MESSAGE_ARRAY = new Message[0];
/**
* Immutable empty {@link Folder} array
*/
private static final Folder[] EMPTY_FOLDER_ARRAY = new Folder[0];
/*
* A note on "small" messages: a small message is downloaded in full immediately
* instead of in pieces. Anything over the threshold is downloaded in pieces, with
* attachments being left off completely and downloaded on demand. The threshold is
* configured per account; see Account#getMaximumAutoDownloadMessageSize(), used in
* fetchUnsyncedMessages() and downloadLargeMessages().
*
* Historical rationale for picking a 25k threshold (by educated trial and error):
* http://answers.google.com/answers/threadview?id=312463 claims that the
* average size of an email is 59k, which I feel is too large for our
* blind download. The following tests were performed on a download of
* 25 random messages:
* 5k - 61 seconds,
* 25k - 51 seconds,
* 55k - 53 seconds.
* So 25k gives good performance and a reasonable data footprint.
*/
private static final String PENDING_COMMAND_MOVE_OR_COPY = "com.fsck.k9.MessagingController.moveOrCopy";
private static final String PENDING_COMMAND_MOVE_OR_COPY_BULK = "com.fsck.k9.MessagingController.moveOrCopyBulk";
private static final String PENDING_COMMAND_EMPTY_TRASH = "com.fsck.k9.MessagingController.emptyTrash";
private static final String PENDING_COMMAND_SET_FLAG_BULK = "com.fsck.k9.MessagingController.setFlagBulk";
private static final String PENDING_COMMAND_SET_FLAG = "com.fsck.k9.MessagingController.setFlag";
private static final String PENDING_COMMAND_APPEND = "com.fsck.k9.MessagingController.append";
private static final String PENDING_COMMAND_MARK_ALL_AS_READ = "com.fsck.k9.MessagingController.markAllAsRead";
private static final String PENDING_COMMAND_EXPUNGE = "com.fsck.k9.MessagingController.expunge";
private static MessagingController inst = null;
private BlockingQueue<Command> mCommands = new PriorityBlockingQueue<Command>();
private Thread mThread;
private Set<MessagingListener> mListeners = new CopyOnWriteArraySet<MessagingListener>();
private HashMap<SORT_TYPE, Boolean> sortAscending = new HashMap<SORT_TYPE, Boolean>();
private ConcurrentHashMap<String, AtomicInteger> sendCount = new ConcurrentHashMap<String, AtomicInteger>();
ConcurrentHashMap<Account, Pusher> pushers = new ConcurrentHashMap<Account, Pusher>();
private final ExecutorService threadPool = Executors.newCachedThreadPool();
public enum SORT_TYPE
{
SORT_DATE(R.string.sort_earliest_first, R.string.sort_latest_first, false),
SORT_SUBJECT(R.string.sort_subject_alpha, R.string.sort_subject_re_alpha, true),
SORT_SENDER(R.string.sort_sender_alpha, R.string.sort_sender_re_alpha, true),
SORT_UNREAD(R.string.sort_unread_first, R.string.sort_unread_last, true),
SORT_FLAGGED(R.string.sort_flagged_first, R.string.sort_flagged_last, true),
SORT_ATTACHMENT(R.string.sort_attach_first, R.string.sort_unattached_first, true);
private int ascendingToast;
private int descendingToast;
private boolean defaultAscending;
SORT_TYPE(int ascending, int descending, boolean ndefaultAscending)
{
ascendingToast = ascending;
descendingToast = descending;
defaultAscending = ndefaultAscending;
}
public int getToast(boolean ascending)
{
if (ascending)
{
return ascendingToast;
}
else
{
return descendingToast;
}
}
public boolean isDefaultAscending()
{
return defaultAscending;
}
};
private SORT_TYPE sortType = SORT_TYPE.SORT_DATE;
private MessagingListener checkMailListener = null;
private MemorizingListener memorizingListener = new MemorizingListener();
private boolean mBusy;
private Application mApplication;
// Key is accountUuid:folderName:messageUid , value is unimportant
private ConcurrentHashMap<String, String> deletedUids = new ConcurrentHashMap<String, String>();
private String createMessageKey(Account account, String folder, Message message)
{
return createMessageKey(account, folder, message.getUid());
}
private String createMessageKey(Account account, String folder, String uid)
{
return account.getUuid() + ":" + folder + ":" + uid;
}
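/**
* Records the message in the deletedUids map so that message listing and
* synchronization skip it until unsuppressMessage() is called for it.
*/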
private void suppressMessage(Account account, String folder, Message message)
{
if (account == null || folder == null || message == null)
{
return;
}
String messKey = createMessageKey(account, folder, message);
deletedUids.put(messKey, "true");
}
private void unsuppressMessage(Account account, String folder, String uid)
{
if (account == null || folder == null || uid == null)
{
return;
}
String messKey = createMessageKey(account, folder, uid);
deletedUids.remove(messKey);
}
private boolean isMessageSuppressed(Account account, String folder, Message message)
{
if (account == null || folder == null || message == null)
{
return false;
}
String messKey = createMessageKey(account, folder, message);
if (deletedUids.containsKey(messKey))
{
return true;
}
return false;
}
private MessagingController(Application application)
{
mApplication = application;
mThread = new Thread(this);
mThread.start();
if (memorizingListener != null)
{
addListener(memorizingListener);
}
}
/**
* Gets or creates the singleton instance of MessagingController. The Application is
* used to provide a Context to classes that need it.
* @param application the Application, used as a Context provider
* @return the singleton MessagingController instance
*/
public synchronized static MessagingController getInstance(Application application)
{
if (inst == null)
{
inst = new MessagingController(application);
}
return inst;
}
public boolean isBusy()
{
return mBusy;
}
public void run()
{
Process.setThreadPriority(Process.THREAD_PRIORITY_BACKGROUND);
while (true)
{
String commandDescription = null;
try
{
final Command command = mCommands.take();
if (command != null)
{
commandDescription = command.description;
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Running " + (command.isForeground ? "Foreground" : "Background") + " command '" + command.description + "', seq = " + command.sequence);
mBusy = true;
try
{
command.runnable.run();
}
catch (UnavailableAccountException e)
{
// retry later
new Thread()
{
@Override
public void run()
{
try
{
sleep(30 * 1000);
mCommands.put(command);
}
catch (InterruptedException e)
{
Log.e(K9.LOG_TAG, "interrupted while putting a pending command for"
+ " an unavailable account back into the queue."
+ " THIS SHOULD NEVER HAPPEN.");
}
}
}.start();
}
if (K9.DEBUG)
Log.i(K9.LOG_TAG, (command.isForeground ? "Foreground" : "Background") +
" Command '" + command.description + "' completed");
for (MessagingListener l : getListeners(command.listener))
{
l.controllerCommandCompleted(mCommands.size() > 0);
}
}
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Error running command '" + commandDescription + "'", e);
}
mBusy = false;
}
}
private void put(String description, MessagingListener listener, Runnable runnable)
{
putCommand(mCommands, description, listener, runnable, true);
}
private void putBackground(String description, MessagingListener listener, Runnable runnable)
{
putCommand(mCommands, description, listener, runnable, false);
}
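/**
* Adds a command to the given queue, retrying up to 10 times if the calling
* thread is interrupted while enqueuing it. Throws an Error if the command
* still could not be queued.
*/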
private void putCommand(BlockingQueue<Command> queue, String description, MessagingListener listener, Runnable runnable, boolean isForeground)
{
int retries = 10;
Exception e = null;
while (retries-- > 0)
{
try
{
Command command = new Command();
command.listener = listener;
command.runnable = runnable;
command.description = description;
command.isForeground = isForeground;
queue.put(command);
return;
}
catch (InterruptedException ie)
{
try
{
Thread.sleep(200);
}
catch (InterruptedException ne)
{
}
e = ie;
}
}
throw new Error(e);
}
public void addListener(MessagingListener listener)
{
mListeners.add(listener);
refreshListener(listener);
}
public void refreshListener(MessagingListener listener)
{
if (memorizingListener != null && listener != null)
{
memorizingListener.refreshOther(listener);
}
}
public void removeListener(MessagingListener listener)
{
mListeners.remove(listener);
}
public Set<MessagingListener> getListeners()
{
return mListeners;
}
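/**
* Returns the registered listeners plus the given listener (if it is not null),
* so that callbacks reach both the global listeners and the caller's listener.
*/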
public Set<MessagingListener> getListeners(MessagingListener listener)
{
if (listener == null)
{
return mListeners;
}
Set<MessagingListener> listeners = new HashSet<MessagingListener>(mListeners);
listeners.add(listener);
return listeners;
}
/**
* Lists folders that are available locally and remotely. Local folders are
* reported to the listener; if refreshRemote is true or there are no local
* folders yet, the folder list is refreshed from the remote store first and
* reported once that refresh completes. The work runs on a background thread
* from the internal thread pool.
* TODO this needs to cache the remote folder list
*
* @param account the account whose folders to list
* @param refreshRemote if true, refresh the folder list from the remote store
* @param listener listener to notify, may be null
*/
public void listFolders(final Account account, final boolean refreshRemote, final MessagingListener listener)
{
threadPool.execute(new Runnable()
{
public void run()
{
listFoldersSynchronous(account, refreshRemote, listener);
}
});
}
/**
* Lists folders that are available locally and remotely, on the calling thread.
* Local folders are reported to the listener before this method returns; if
* refreshRemote is true or there are no local folders yet, a remote refresh is
* queued instead and the listener is notified when it completes.
* TODO this needs to cache the remote folder list
*
* @param account the account whose folders to list
* @param refreshRemote if true, refresh the folder list from the remote store
* @param listener listener to notify, may be null
*/
public void listFoldersSynchronous(final Account account, final boolean refreshRemote, final MessagingListener listener)
{
for (MessagingListener l : getListeners(listener))
{
l.listFoldersStarted(account);
}
List<? extends Folder> localFolders = null;
if (!account.isAvailable(mApplication))
{
Log.i(K9.LOG_TAG, "not listing folders of unavailable account");
}
else
{
try
{
Store localStore = account.getLocalStore();
localFolders = localStore.getPersonalNamespaces(false);
if (refreshRemote || localFolders == null || localFolders.size() == 0)
{
doRefreshRemote(account, listener);
return;
}
Folder[] folderArray = localFolders.toArray(EMPTY_FOLDER_ARRAY);
for (MessagingListener l : getListeners(listener))
{
l.listFolders(account, folderArray);
}
}
catch (Exception e)
{
for (MessagingListener l : getListeners(listener))
{
l.listFoldersFailed(account, e.getMessage());
}
addErrorMessage(account, null, e);
return;
}
finally
{
if (localFolders != null)
{
for (Folder localFolder : localFolders)
{
if (localFolder != null)
{
localFolder.close();
}
}
}
}
}
for (MessagingListener l : getListeners(listener))
{
l.listFoldersFinished(account);
}
}
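/**
* Queues a command that fetches the folder list from the remote store, creates
* any local folders that are missing, deletes local folders that no longer exist
* remotely (the inbox and the account's special folders are never deleted), and
* then reports the resulting local folder list to all listeners.
*/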
private void doRefreshRemote(final Account account, MessagingListener listener)
{
put("doRefreshRemote", listener, new Runnable()
{
public void run()
{
List<? extends Folder> localFolders = null;
try
{
Store store = account.getRemoteStore();
List<? extends Folder> remoteFolders = store.getPersonalNamespaces(false);
LocalStore localStore = account.getLocalStore();
HashSet<String> remoteFolderNames = new HashSet<String>();
for (int i = 0, count = remoteFolders.size(); i < count; i++)
{
LocalFolder localFolder = localStore.getFolder(remoteFolders.get(i).getName());
if (!localFolder.exists())
{
localFolder.create(FolderType.HOLDS_MESSAGES, account.getDisplayCount());
}
remoteFolderNames.add(remoteFolders.get(i).getName());
}
localFolders = localStore.getPersonalNamespaces(false);
/*
* Clear out any folders that are no longer on the remote store.
*/
for (Folder localFolder : localFolders)
{
String localFolderName = localFolder.getName();
if (localFolderName.equalsIgnoreCase(K9.INBOX) ||
localFolderName.equals(account.getTrashFolderName()) ||
localFolderName.equals(account.getOutboxFolderName()) ||
localFolderName.equals(account.getDraftsFolderName()) ||
localFolderName.equals(account.getSentFolderName()) ||
localFolderName.equals(account.getErrorFolderName()))
{
continue;
}
if (!remoteFolderNames.contains(localFolder.getName()))
{
localFolder.delete(false);
}
}
localFolders = localStore.getPersonalNamespaces(false);
Folder[] folderArray = localFolders.toArray(EMPTY_FOLDER_ARRAY);
for (MessagingListener l : getListeners())
{
l.listFolders(account, folderArray);
}
for (MessagingListener l : getListeners())
{
l.listFoldersFinished(account);
}
}
catch (Exception e)
{
for (MessagingListener l : getListeners())
{
l.listFoldersFailed(account, "");
}
addErrorMessage(account, null, e);
}
finally
{
if (localFolders != null)
{
for (Folder localFolder : localFolders)
{
if (localFolder != null)
{
localFolder.close();
}
}
}
}
}
});
}
/**
* List the messages in the local message store for the given folder asynchronously.
*
* @param account the account the folder belongs to
* @param folder the name of the local folder
* @param listener listener to receive the results, may be null
*/
public void listLocalMessages(final Account account, final String folder, final MessagingListener listener)
{
threadPool.execute(new Runnable()
{
public void run()
{
listLocalMessagesSynchronous(account, folder, listener);
}
});
}
/**
* List the messages in the local message store for the given folder synchronously.
*
* @param account the account the folder belongs to
* @param folder the name of the local folder
* @param listener listener to receive the results, may be null
*/
public void listLocalMessagesSynchronous(final Account account, final String folder, final MessagingListener listener)
{
for (MessagingListener l : getListeners(listener))
{
l.listLocalMessagesStarted(account, folder);
}
Folder localFolder = null;
MessageRetrievalListener retrievalListener =
new MessageRetrievalListener()
{
List<Message> pendingMessages = new ArrayList<Message>();
int totalDone = 0;
public void messageStarted(String message, int number, int ofTotal) {}
public void messageFinished(Message message, int number, int ofTotal)
{
if (!isMessageSuppressed(account, folder, message))
{
pendingMessages.add(message);
totalDone++;
if (pendingMessages.size() > 10)
{
addPendingMessages();
}
}
else
{
for (MessagingListener l : getListeners(listener))
{
l.listLocalMessagesRemoveMessage(account, folder, message);
}
}
}
public void messagesFinished(int number)
{
addPendingMessages();
}
private void addPendingMessages()
{
for (MessagingListener l : getListeners(listener))
{
l.listLocalMessagesAddMessages(account, folder, pendingMessages);
}
pendingMessages.clear();
}
};
try
{
Store localStore = account.getLocalStore();
localFolder = localStore.getFolder(folder);
localFolder.open(OpenMode.READ_WRITE);
localFolder.getMessages(
retrievalListener,
false // Skip deleted messages
);
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Got ack that callbackRunner finished");
for (MessagingListener l : getListeners(listener))
{
l.listLocalMessagesFinished(account, folder);
}
}
catch (Exception e)
{
for (MessagingListener l : getListeners(listener))
{
l.listLocalMessagesFailed(account, folder, e.getMessage());
}
addErrorMessage(account, null, e);
}
finally
{
if (localFolder != null)
{
localFolder.close();
}
}
}
public void searchLocalMessages(SearchSpecification searchSpecification, final Message[] messages, final MessagingListener listener)
{
searchLocalMessages(searchSpecification.getAccountUuids(), searchSpecification.getFolderNames(), messages,
searchSpecification.getQuery(), searchSpecification.isIntegrate(), searchSpecification.getRequiredFlags(), searchSpecification.getForbiddenFlags(), listener);
}
/**
* Find all messages in any local account which match the given query.
*
* @param accountUuids UUIDs of the accounts to search, or null to search all available accounts
* @param folderNames names of the folders to restrict the search to, or null for no restriction
* @param messages if not null, restrict the search to these messages
* @param query the search string
* @param integrate if true, search only folders that are included in the integrated inbox
* @param requiredFlags flags a message must have to match
* @param forbiddenFlags flags a message must not have to match
* @param listener listener to receive the results, may be null
*/
public void searchLocalMessages(final String[] accountUuids, final String[] folderNames, final Message[] messages, final String query, final boolean integrate,
final Flag[] requiredFlags, final Flag[] forbiddenFlags, final MessagingListener listener)
{
if (K9.DEBUG)
{
Log.i(K9.LOG_TAG, "searchLocalMessages ("
+ "accountUuids=" + Utility.combine(accountUuids, ',')
+ ", folderNames = " + Utility.combine(folderNames, ',')
+ ", messages.size() = " + (messages != null ? messages.length : null)
+ ", query = " + query
+ ", integrate = " + integrate
+ ", requiredFlags = " + Utility.combine(requiredFlags, ',')
+ ", forbiddenFlags = " + Utility.combine(forbiddenFlags, ',')
+ ")");
}
threadPool.execute(new Runnable()
{
public void run()
{
searchLocalMessagesSynchronous(accountUuids,folderNames, messages, query, integrate, requiredFlags, forbiddenFlags, listener);
}
});
}
public void searchLocalMessagesSynchronous(final String[] accountUuids, final String[] folderNames, final Message[] messages, final String query, final boolean integrate, final Flag[] requiredFlags, final Flag[] forbiddenFlags, final MessagingListener listener)
{
final AccountStats stats = new AccountStats();
final Set<String> accountUuidsSet = new HashSet<String>();
if (accountUuids != null)
{
for (String accountUuid : accountUuids)
{
accountUuidsSet.add(accountUuid);
}
}
final Preferences prefs = Preferences.getPreferences(mApplication.getApplicationContext());
Account[] accounts = prefs.getAccounts();
List<LocalFolder> foldersToSearch = null;
boolean displayableOnly = false;
boolean noSpecialFolders = true;
for (final Account account : accounts)
{
if (!account.isAvailable(mApplication))
{
Log.d(K9.LOG_TAG, "searchLocalMessagesSynchronous() ignores account that is not available");
continue;
}
if (accountUuids != null && !accountUuidsSet.contains(account.getUuid()))
{
continue;
}
if (accountUuids != null && accountUuidsSet.contains(account.getUuid()))
{
displayableOnly = true;
noSpecialFolders = true;
}
else if (!integrate && folderNames == null)
{
Account.Searchable searchableFolders = account.getSearchableFolders();
switch (searchableFolders)
{
case NONE:
continue;
case DISPLAYABLE:
displayableOnly = true;
break;
}
}
List<Message> messagesToSearch = null;
if (messages != null)
{
messagesToSearch = new LinkedList<Message>();
for (Message message : messages)
{
if (message.getFolder().getAccount().getUuid().equals(account.getUuid()))
{
messagesToSearch.add(message);
}
}
if (messagesToSearch.isEmpty())
{
continue;
}
}
if (listener != null)
{
listener.listLocalMessagesStarted(account, null);
}
if (integrate || displayableOnly || folderNames != null || noSpecialFolders)
{
List<LocalFolder> tmpFoldersToSearch = new LinkedList<LocalFolder>();
try
{
LocalStore store = account.getLocalStore();
List<? extends Folder> folders = store.getPersonalNamespaces(false);
Set<String> folderNameSet = null;
if (folderNames != null)
{
folderNameSet = new HashSet<String>();
for (String folderName : folderNames)
{
folderNameSet.add(folderName);
}
}
for (Folder folder : folders)
{
LocalFolder localFolder = (LocalFolder)folder;
boolean include = true;
folder.refresh(prefs);
String localFolderName = localFolder.getName();
if (integrate)
{
include = localFolder.isIntegrate();
}
else
{
if (folderNameSet != null)
{
if (!folderNameSet.contains(localFolderName))
{
include = false;
}
}
// Never exclude the INBOX (see issue 1817)
else if (noSpecialFolders && !localFolderName.equals(K9.INBOX) && (
localFolderName.equals(account.getTrashFolderName()) ||
localFolderName.equals(account.getOutboxFolderName()) ||
localFolderName.equals(account.getDraftsFolderName()) ||
localFolderName.equals(account.getSentFolderName()) ||
localFolderName.equals(account.getErrorFolderName())))
{
include = false;
}
else if (displayableOnly && modeMismatch(account.getFolderDisplayMode(), folder.getDisplayClass()))
{
include = false;
}
}
if (include)
{
tmpFoldersToSearch.add(localFolder);
}
}
if (tmpFoldersToSearch.size() < 1)
{
continue;
}
foldersToSearch = tmpFoldersToSearch;
}
catch (MessagingException me)
{
Log.e(K9.LOG_TAG, "Unable to restrict search folders in Account " + account.getDescription() + ", searching all", me);
addErrorMessage(account, null, me);
}
}
MessageRetrievalListener retrievalListener = new MessageRetrievalListener()
{
public void messageStarted(String message, int number, int ofTotal) {}
public void messageFinished(Message message, int number, int ofTotal)
{
if (!isMessageSuppressed(message.getFolder().getAccount(), message.getFolder().getName(), message))
{
List<Message> messages = new ArrayList<Message>();
messages.add(message);
stats.unreadMessageCount += (!message.isSet(Flag.SEEN)) ? 1 : 0;
stats.flaggedMessageCount += (message.isSet(Flag.FLAGGED)) ? 1 : 0;
if (listener != null)
{
listener.listLocalMessagesAddMessages(account, null, messages);
}
}
}
public void messagesFinished(int number)
{
}
};
try
{
String[] queryFields = {"html_content","subject","sender_list"};
LocalStore localStore = account.getLocalStore();
localStore.searchForMessages(retrievalListener, queryFields
, query, foldersToSearch,
messagesToSearch == null ? null : messagesToSearch.toArray(EMPTY_MESSAGE_ARRAY),
requiredFlags, forbiddenFlags);
}
catch (Exception e)
{
if (listener != null)
{
listener.listLocalMessagesFailed(account, null, e.getMessage());
}
addErrorMessage(account, null, e);
}
finally
{
if (listener != null)
{
listener.listLocalMessagesFinished(account, null);
}
}
}
if (listener != null)
{
listener.searchStats(stats);
}
}
public void loadMoreMessages(Account account, String folder, MessagingListener listener)
{
try
{
LocalStore localStore = account.getLocalStore();
LocalFolder localFolder = localStore.getFolder(folder);
if (localFolder.getVisibleLimit() > 0 )
{
localFolder.setVisibleLimit(localFolder.getVisibleLimit() + account.getDisplayCount());
}
synchronizeMailbox(account, folder, listener, null);
}
catch (MessagingException me)
{
addErrorMessage(account, null, me);
throw new RuntimeException("Unable to set visible limit on folder", me);
}
}
public void resetVisibleLimits(Collection<Account> accounts)
{
for (Account account : accounts)
{
account.resetVisibleLimits();
}
}
/**
* Start background synchronization of the specified folder.
* @param account the account the folder belongs to
* @param folder the name of the folder to synchronize
* @param listener listener to notify of progress, may be null
* @param providedRemoteFolder an already-open remote Folder to reuse instead of
* opening one, or null; a provided folder is not closed by this method
*/
public void synchronizeMailbox(final Account account, final String folder, final MessagingListener listener, final Folder providedRemoteFolder)
{
putBackground("synchronizeMailbox", listener, new Runnable()
{
public void run()
{
synchronizeMailboxSynchronous(account, folder, listener, providedRemoteFolder);
}
});
}
/**
* Synchronize the specified folder on the calling thread. This is generally only
* called by synchronizeMailbox.
*
* TODO Break this method up into smaller chunks.
*
* @param account the account the folder belongs to
* @param folder the name of the folder to synchronize
* @param listener listener to notify of progress, may be null
* @param providedRemoteFolder an already-open remote Folder to reuse instead of
* opening one, or null; a provided folder is not closed by this method
*/
private void synchronizeMailboxSynchronous(final Account account, final String folder, final MessagingListener listener, Folder providedRemoteFolder)
{
Folder remoteFolder = null;
LocalFolder tLocalFolder = null;
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Synchronizing folder " + account.getDescription() + ":" + folder);
for (MessagingListener l : getListeners(listener))
{
l.synchronizeMailboxStarted(account, folder);
}
/*
* We don't ever sync the Outbox or errors folder
*/
if (folder.equals(account.getOutboxFolderName()) || folder.equals(account.getErrorFolderName()))
{
for (MessagingListener l : getListeners(listener))
{
l.synchronizeMailboxFinished(account, folder, 0, 0);
}
return;
}
Exception commandException = null;
try
{
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "SYNC: About to process pending commands for account " +
account.getDescription());
try
{
processPendingCommandsSynchronous(account);
}
catch (Exception e)
{
addErrorMessage(account, null, e);
Log.e(K9.LOG_TAG, "Failure processing command, but allow message sync attempt", e);
commandException = e;
}
/*
* Get the message list from the local store and create an index of
* the uids within the list.
*/
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "SYNC: About to get local folder " + folder);
final LocalStore localStore = account.getLocalStore();
tLocalFolder = localStore.getFolder(folder);
final LocalFolder localFolder = tLocalFolder;
localFolder.open(OpenMode.READ_WRITE);
Message[] localMessages = localFolder.getMessages(null);
HashMap<String, Message> localUidMap = new HashMap<String, Message>();
for (Message message : localMessages)
{
localUidMap.put(message.getUid(), message);
}
if (providedRemoteFolder != null)
{
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "SYNC: using providedRemoteFolder " + folder);
remoteFolder = providedRemoteFolder;
}
else
{
Store remoteStore = account.getRemoteStore();
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "SYNC: About to get remote folder " + folder);
remoteFolder = remoteStore.getFolder(folder);
if (! verifyOrCreateRemoteSpecialFolder(account, folder, remoteFolder, listener))
{
return;
}
/*
* Synchronization process:
Open the folder
Upload any local messages that are marked as PENDING_UPLOAD (Drafts, Sent, Trash)
Get the message count
Get the list of the newest K9.DEFAULT_VISIBLE_LIMIT messages
getMessages(messageCount - K9.DEFAULT_VISIBLE_LIMIT, messageCount)
See if we have each message locally; if not, fetch its flags and envelope
Get and update the unread count for the folder
Update the remote flags of any messages we have locally with an internal date
newer than the remote message.
Get the current flags for any messages we have locally but did not just download
Update local flags
For any message we have locally but not remotely, delete the local message to keep
cache clean.
Download larger parts of any new messages.
(Optional) Download small attachments in the background.
*/
/*
* Open the remote folder. This pre-loads certain metadata like message count.
*/
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "SYNC: About to open remote folder " + folder);
remoteFolder.open(OpenMode.READ_WRITE);
if (Account.EXPUNGE_ON_POLL.equals(account.getExpungePolicy()))
{
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "SYNC: Expunging folder " + account.getDescription() + ":" + folder);
remoteFolder.expunge();
}
}
/*
* Get the remote message count.
*/
int remoteMessageCount = remoteFolder.getMessageCount();
int visibleLimit = localFolder.getVisibleLimit();
if (visibleLimit < 0)
{
visibleLimit = K9.DEFAULT_VISIBLE_LIMIT;
}
Message[] remoteMessageArray = EMPTY_MESSAGE_ARRAY;
final ArrayList<Message> remoteMessages = new ArrayList<Message>();
// final ArrayList<Message> unsyncedMessages = new ArrayList<Message>();
HashMap<String, Message> remoteUidMap = new HashMap<String, Message>();
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "SYNC: Remote message count for folder " + folder + " is " + remoteMessageCount);
final Date earliestDate = account.getEarliestPollDate();
if (remoteMessageCount > 0)
{
/*
* Message numbers start at 1.
*/
int remoteStart;
if (visibleLimit > 0 )
{
remoteStart = Math.max(0, remoteMessageCount - visibleLimit) + 1;
}
else
{
remoteStart = 1;
}
int remoteEnd = remoteMessageCount;
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "SYNC: About to get messages " + remoteStart + " through " + remoteEnd + " for folder " + folder);
final AtomicInteger headerProgress = new AtomicInteger(0);
for (MessagingListener l : getListeners(listener))
{
l.synchronizeMailboxHeadersStarted(account, folder);
}
remoteMessageArray = remoteFolder.getMessages(remoteStart, remoteEnd, earliestDate, null);
int messageCount = remoteMessageArray.length;
for (Message thisMess : remoteMessageArray)
{
headerProgress.incrementAndGet();
for (MessagingListener l : getListeners(listener))
{
l.synchronizeMailboxHeadersProgress(account, folder, headerProgress.get(), messageCount);
}
Message localMessage = localUidMap.get(thisMess.getUid());
if (localMessage == null || !localMessage.olderThan(earliestDate))
{
remoteMessages.add(thisMess);
remoteUidMap.put(thisMess.getUid(), thisMess);
}
}
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "SYNC: Got " + remoteUidMap.size() + " messages for folder " + folder);
remoteMessageArray = null;
for (MessagingListener l : getListeners(listener))
{
l.synchronizeMailboxHeadersFinished(account, folder, headerProgress.get(), remoteUidMap.size());
}
}
else if (remoteMessageCount < 0)
{
throw new Exception("Message count " + remoteMessageCount + " for folder " + folder);
}
/*
* Remove any messages that are in the local store but no longer on the remote store or are too old
*/
if (account.syncRemoteDeletions())
{
for (Message localMessage : localMessages)
{
if (remoteUidMap.get(localMessage.getUid()) == null)
{
localMessage.destroy();
for (MessagingListener l : getListeners(listener))
{
l.synchronizeMailboxRemovedMessage(account, folder, localMessage);
}
}
}
}
localMessages = null;
/*
* Now we download the actual content of messages.
*/
int newMessages = downloadMessages(account, remoteFolder, localFolder, remoteMessages, false);
int unreadMessageCount = setLocalUnreadCountToRemote(localFolder, remoteFolder, newMessages);
setLocalFlaggedCountToRemote(localFolder, remoteFolder);
for (MessagingListener l : getListeners())
{
l.folderStatusChanged(account, folder, unreadMessageCount);
}
/*
* Notify listeners that we're finally done.
*/
localFolder.setLastChecked(System.currentTimeMillis());
localFolder.setStatus(null);
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "Done synchronizing folder " +
account.getDescription() + ":" + folder + " @ " + new Date() +
" with " + newMessages + " new messages");
for (MessagingListener l : getListeners(listener))
{
l.synchronizeMailboxFinished(account, folder, remoteMessageCount, newMessages);
}
if (commandException != null)
{
String rootMessage = getRootCauseMessage(commandException);
Log.e(K9.LOG_TAG, "Root cause failure in " + account.getDescription() + ":" +
tLocalFolder.getName() + " was '" + rootMessage + "'");
localFolder.setStatus(rootMessage);
for (MessagingListener l : getListeners(listener))
{
l.synchronizeMailboxFailed(account, folder, rootMessage);
}
}
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Done synchronizing folder " + account.getDescription() + ":" + folder);
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "synchronizeMailbox", e);
// If we don't set the last checked, it can try too often during
// failure conditions
String rootMessage = getRootCauseMessage(e);
if (tLocalFolder != null)
{
try
{
tLocalFolder.setStatus(rootMessage);
tLocalFolder.setLastChecked(System.currentTimeMillis());
}
catch (MessagingException me)
{
Log.e(K9.LOG_TAG, "Could not set last checked on folder " + account.getDescription() + ":" +
tLocalFolder.getName(), e);
}
}
for (MessagingListener l : getListeners(listener))
{
l.synchronizeMailboxFailed(
account,
folder,
rootMessage);
}
addErrorMessage(account, null, e);
Log.e(K9.LOG_TAG, "Failed synchronizing folder " +
account.getDescription() + ":" + folder + " @ " + new Date());
}
finally
{
if (providedRemoteFolder == null && remoteFolder != null)
{
remoteFolder.close();
}
if (tLocalFolder != null)
{
tLocalFolder.close();
}
}
}
/**
* If the folder is a "special" folder we need to see if it exists
* on the remote server. If it does not exist we'll try to create it. If we
* can't create it we'll abort. This will happen on every single Pop3 folder as
* designed and on Imap folders during error conditions. This allows us
* to treat Pop3 and Imap the same in this code.
*/
private boolean verifyOrCreateRemoteSpecialFolder(final Account account, final String folder, final Folder remoteFolder, final MessagingListener listener) throws MessagingException
{
if (folder.equals(account.getTrashFolderName()) ||
folder.equals(account.getSentFolderName()) ||
folder.equals(account.getDraftsFolderName()))
{
if (!remoteFolder.exists())
{
if (!remoteFolder.create(FolderType.HOLDS_MESSAGES))
{
for (MessagingListener l : getListeners(listener))
{
l.synchronizeMailboxFinished(account, folder, 0, 0);
}
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Done synchronizing folder " + folder);
return false;
}
}
}
return true;
}
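/**
* Sets the local folder's unread count to the value reported by the remote
* folder or, if the remote store cannot report one (-1), to the number of local
* messages that are neither read nor deleted. Returns the resulting count.
*/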
private int setLocalUnreadCountToRemote(LocalFolder localFolder, Folder remoteFolder, int newMessageCount) throws MessagingException
{
int remoteUnreadMessageCount = remoteFolder.getUnreadMessageCount();
if (remoteUnreadMessageCount != -1)
{
localFolder.setUnreadMessageCount(remoteUnreadMessageCount);
}
else
{
int unreadCount = 0;
Message[] messages = localFolder.getMessages(null, false);
for (Message message : messages)
{
if (!message.isSet(Flag.SEEN) && !message.isSet(Flag.DELETED))
{
unreadCount++;
}
}
localFolder.setUnreadMessageCount(unreadCount);
}
return localFolder.getUnreadMessageCount();
}
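/**
* Sets the local folder's flagged-message count to the value reported by the
* remote folder or, if the remote store cannot report one (-1), to the number of
* local messages that are flagged and not deleted.
*/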
private void setLocalFlaggedCountToRemote(LocalFolder localFolder, Folder remoteFolder) throws MessagingException
{
int remoteFlaggedMessageCount = remoteFolder.getFlaggedMessageCount();
if (remoteFlaggedMessageCount != -1)
{
localFolder.setFlaggedMessageCount(remoteFlaggedMessageCount);
}
else
{
int flaggedCount = 0;
Message[] messages = localFolder.getMessages(null, false);
for (Message message : messages)
{
if (message.isSet(Flag.FLAGGED) && !message.isSet(Flag.DELETED))
{
flaggedCount++;
}
}
localFolder.setFlaggedMessageCount(flaggedCount);
}
}
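/**
* Fetches the contents of the given remote messages into the local folder.
* Messages not yet downloaded are fetched (small ones in full, large ones
* partially or by parts, based on the account's maximum auto-download message
* size); messages that are already downloaded only have their flags refreshed.
*
* @param flagSyncOnly if true, messages that are missing locally are not downloaded
* @return the number of newly downloaded messages that qualified for a notification
*/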
private int downloadMessages(final Account account, final Folder remoteFolder,
final LocalFolder localFolder, List<Message> inputMessages, boolean flagSyncOnly) throws MessagingException
{
final Date earliestDate = account.getEarliestPollDate();
if (earliestDate != null)
{
if (K9.DEBUG)
{
Log.d(K9.LOG_TAG, "Only syncing messages after " + earliestDate);
}
}
final String folder = remoteFolder.getName();
int unreadBeforeStart = 0;
try
{
AccountStats stats = account.getStats(mApplication);
unreadBeforeStart = stats.unreadMessageCount;
}
catch (MessagingException e)
{
Log.e(K9.LOG_TAG, "Unable to getUnreadMessageCount for account: " + account, e);
}
ArrayList<Message> syncFlagMessages = new ArrayList<Message>();
List<Message> unsyncedMessages = new ArrayList<Message>();
final AtomicInteger newMessages = new AtomicInteger(0);
List<Message> messages = new ArrayList<Message>(inputMessages);
for (Message message : messages)
{
if (message.isSet(Flag.DELETED))
{
syncFlagMessages.add(message);
}
else if (!isMessageSuppressed(account, folder, message))
{
Message localMessage = localFolder.getMessage(message.getUid());
if (localMessage == null)
{
if (!flagSyncOnly)
{
if (!message.isSet(Flag.X_DOWNLOADED_FULL) && !message.isSet(Flag.X_DOWNLOADED_PARTIAL))
{
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Message with uid " + message.getUid() + " has not yet been downloaded");
unsyncedMessages.add(message);
}
else
{
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Message with uid " + message.getUid() + " is partially or fully downloaded");
// Store the updated message locally
localFolder.appendMessages(new Message[] { message });
localMessage = localFolder.getMessage(message.getUid());
localMessage.setFlag(Flag.X_DOWNLOADED_FULL, message.isSet(Flag.X_DOWNLOADED_FULL));
localMessage.setFlag(Flag.X_DOWNLOADED_PARTIAL, message.isSet(Flag.X_DOWNLOADED_PARTIAL));
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxAddOrUpdateMessage(account, folder, localMessage);
if (!localMessage.isSet(Flag.SEEN))
{
l.synchronizeMailboxNewMessage(account, folder, localMessage);
}
}
}
}
}
else if (!localMessage.isSet(Flag.DELETED))
{
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Message with uid " + message.getUid() + " is present in the local store");
if (!localMessage.isSet(Flag.X_DOWNLOADED_FULL) && !localMessage.isSet(Flag.X_DOWNLOADED_PARTIAL))
{
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Message with uid " + message.getUid()
+ " is not downloaded, even partially; trying again");
unsyncedMessages.add(message);
}
else
{
String newPushState = remoteFolder.getNewPushState(localFolder.getPushState(), message);
if (newPushState != null)
{
localFolder.setPushState(newPushState);
}
syncFlagMessages.add(message);
}
}
}
}
final AtomicInteger progress = new AtomicInteger(0);
final int todo = unsyncedMessages.size() + syncFlagMessages.size();
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxProgress(account, folder, progress.get(), todo);
}
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "SYNC: Have " + unsyncedMessages.size() + " unsynced messages");
messages.clear();
final ArrayList<Message> largeMessages = new ArrayList<Message>();
final ArrayList<Message> smallMessages = new ArrayList<Message>();
if (unsyncedMessages.size() > 0)
{
/*
* Reverse the order of the messages. Depending on the server this may get us
* fetch results from newest to oldest. If not, no harm done.
*/
Collections.reverse(unsyncedMessages);
int visibleLimit = localFolder.getVisibleLimit();
int listSize = unsyncedMessages.size();
if ((visibleLimit > 0) && (listSize > visibleLimit))
{
unsyncedMessages = unsyncedMessages.subList(listSize - visibleLimit, listSize);
}
FetchProfile fp = new FetchProfile();
if (remoteFolder.supportsFetchingFlags())
{
fp.add(FetchProfile.Item.FLAGS);
}
fp.add(FetchProfile.Item.ENVELOPE);
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "SYNC: About to fetch " + unsyncedMessages.size() + " unsynced messages for folder " + folder);
fetchUnsyncedMessages(account, remoteFolder, localFolder, unsyncedMessages, smallMessages,largeMessages, progress, todo, fp);
// If a message didn't exist, messageFinished won't be called, but we shouldn't try again
// If we got here, nothing failed
for (Message message : unsyncedMessages)
{
String newPushState = remoteFolder.getNewPushState(localFolder.getPushState(), message);
if (newPushState != null)
{
localFolder.setPushState(newPushState);
}
}
if (K9.DEBUG)
{
Log.d(K9.LOG_TAG, "SYNC: Synced unsynced messages for folder " + folder);
}
}
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "SYNC: Have "
+ largeMessages.size() + " large messages and "
+ smallMessages.size() + " small messages out of "
+ unsyncedMessages.size() + " unsynced messages");
unsyncedMessages.clear();
/*
* Grab the content of the small messages first. This is going to
* be very fast and at very worst will be a single upload of a few bytes and a single
* download of 625k.
*/
FetchProfile fp = new FetchProfile();
fp.add(FetchProfile.Item.BODY);
// fp.add(FetchProfile.Item.FLAGS);
// fp.add(FetchProfile.Item.ENVELOPE);
downloadSmallMessages(account, remoteFolder, localFolder, smallMessages, progress, unreadBeforeStart, newMessages, todo, fp);
smallMessages.clear();
/*
* Now do the large messages that require more round trips.
*/
fp.clear();
fp.add(FetchProfile.Item.STRUCTURE);
downloadLargeMessages(account, remoteFolder, localFolder, largeMessages, progress, unreadBeforeStart, newMessages, todo, fp);
largeMessages.clear();
/*
* Refresh the flags for any messages in the local store that we didn't just
* download.
*/
refreshLocalMessageFlags(account,remoteFolder,localFolder,syncFlagMessages,progress,todo);
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "SYNC: Synced remote messages for folder " + folder + ", " + newMessages.get() + " new messages");
localFolder.purgeToVisibleLimit(new MessageRemovalListener()
{
public void messageRemoved(Message message)
{
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxRemovedMessage(account, folder, message);
}
}
});
return newMessages.get();
}
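/**
* Fetches flags and envelope data for the not-yet-downloaded messages, skipping
* messages that are deleted on the server or older than the account's earliest
* poll date, and sorts the remainder into smallMessages and largeMessages based
* on the account's maximum auto-download message size. Messages with a usable
* envelope (subject and sender) are stored locally right away so they show up
* in the message list.
*/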
private void fetchUnsyncedMessages(final Account account, final Folder remoteFolder,
final LocalFolder localFolder,
List<Message> unsyncedMessages,
final ArrayList<Message> smallMessages,
final ArrayList<Message> largeMessages,
final AtomicInteger progress,
final int todo,
FetchProfile fp) throws MessagingException
{
final String folder = remoteFolder.getName();
final Date earliestDate = account.getEarliestPollDate();
remoteFolder.fetch(unsyncedMessages.toArray(EMPTY_MESSAGE_ARRAY), fp,
new MessageRetrievalListener()
{
public void messageFinished(Message message, int number, int ofTotal)
{
try
{
String newPushState = remoteFolder.getNewPushState(localFolder.getPushState(), message);
if (newPushState != null)
{
localFolder.setPushState(newPushState);
}
if (message.isSet(Flag.DELETED) || message.olderThan(earliestDate))
{
if (K9.DEBUG)
{
if (message.isSet(Flag.DELETED))
{
Log.v(K9.LOG_TAG, "Newly downloaded message " + account + ":" + folder + ":" + message.getUid()
+ " was marked deleted on server, skipping");
}
else
{
Log.d(K9.LOG_TAG, "Newly downloaded message " + message.getUid() + " is older than "
+ earliestDate + ", skipping");
}
}
progress.incrementAndGet();
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxProgress(account, folder, progress.get(), todo);
}
return;
}
if (message.getSize() > account.getMaximumAutoDownloadMessageSize())
{
largeMessages.add(message);
}
else
{
smallMessages.add(message);
}
// And include it in the view
if (message.getSubject() != null &&
message.getFrom() != null)
{
/*
* We check to make sure that we got something worth
* showing (subject and from) because some protocols
* (POP) may not be able to give us headers for
* ENVELOPE, only size.
*/
if (!isMessageSuppressed(account, folder, message))
{
// Store the new message locally
localFolder.appendMessages(new Message[]
{
message
});
Message localMessage = localFolder.getMessage(message.getUid());
syncFlags(localMessage, message);
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "About to notify listeners that we got a new unsynced message "
+ account + ":" + folder + ":" + message.getUid());
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxAddOrUpdateMessage(account, folder, localMessage);
}
}
}
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Error while storing downloaded message.", e);
addErrorMessage(account, null, e);
}
}
public void messageStarted(String uid, int number, int ofTotal)
{
}
public void messagesFinished(int total) {}
});
}
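/**
* Returns false if the message is suppressed locally or older than the account's
* earliest poll date, true otherwise.
*/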
private boolean shouldImportMessage(final Account account, final String folder, final Message message, final AtomicInteger progress, final Date earliestDate)
{
if (isMessageSuppressed(account, folder, message))
{
if (K9.DEBUG)
{
Log.d(K9.LOG_TAG, "Message " + message.getUid() + " was suppressed "+
"but just downloaded. "+
"The race condition means we wasted some bandwidth. Oh well.");
}
return false;
}
if (message.olderThan(earliestDate))
{
if (K9.DEBUG)
{
Log.d(K9.LOG_TAG, "Message " + message.getUid() + " is older than "
+ earliestDate + ", hence not saving");
}
return false;
}
return true;
}
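/**
* Downloads the given small messages in full, stores them locally, marks them as
* fully downloaded, notifies the listeners, and posts a new-mail notification for
* messages that qualify.
*/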
private void downloadSmallMessages(final Account account, final Folder remoteFolder,
final LocalFolder localFolder,
ArrayList<Message> smallMessages,
final AtomicInteger progress,
final int unreadBeforeStart,
final AtomicInteger newMessages,
final int todo,
FetchProfile fp) throws MessagingException
{
final String folder = remoteFolder.getName();
final Date earliestDate = account.getEarliestPollDate();
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "SYNC: Fetching small messages for folder " + folder);
remoteFolder.fetch(smallMessages.toArray(new Message[smallMessages.size()]),
fp, new MessageRetrievalListener()
{
public void messageFinished(Message message, int number, int ofTotal)
{
try
{
if (!shouldImportMessage(account, folder, message, progress, earliestDate))
{
progress.incrementAndGet();
return;
}
// Store the updated message locally
localFolder.appendMessages(new Message[] { message });
Message localMessage = localFolder.getMessage(message.getUid());
progress.incrementAndGet();
// Set a flag indicating this message has now been fully downloaded
localMessage.setFlag(Flag.X_DOWNLOADED_FULL, true);
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "About to notify listeners that we got a new small message "
+ account + ":" + folder + ":" + message.getUid());
// Update the listener with what we've found
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxAddOrUpdateMessage(account, folder, localMessage);
l.synchronizeMailboxProgress(account, folder, progress.get(), todo);
if (!localMessage.isSet(Flag.SEEN))
{
l.synchronizeMailboxNewMessage(account, folder, localMessage);
}
}
// Send a notification of this message
if (shouldNotifyForMessage(account, message))
{
newMessages.incrementAndGet();
notifyAccount(mApplication, account, message, unreadBeforeStart, newMessages);
}
}
catch (MessagingException me)
{
addErrorMessage(account, null, me);
Log.e(K9.LOG_TAG, "SYNC: fetch small messages", me);
}
}
public void messageStarted(String uid, int number, int ofTotal)
{
}
public void messagesFinished(int total) {}
});
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "SYNC: Done fetching small messages for folder " + folder);
}
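/**
* Downloads the given large messages. If the message structure could not be
* fetched, a size-limited portion of the body is downloaded and the message is
* marked as fully or partially downloaded depending on its size; otherwise only
* the viewable (text) parts are fetched now and attachments are left to be
* downloaded on demand.
*/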
private void downloadLargeMessages(final Account account, final Folder remoteFolder,
final LocalFolder localFolder,
ArrayList<Message> largeMessages,
final AtomicInteger progress,
final int unreadBeforeStart,
final AtomicInteger newMessages,
final int todo,
FetchProfile fp) throws MessagingException
{
final String folder = remoteFolder.getName();
final Date earliestDate = account.getEarliestPollDate();
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "SYNC: Fetching large messages for folder " + folder);
remoteFolder.fetch(largeMessages.toArray(new Message[largeMessages.size()]), fp, null);
for (Message message : largeMessages)
{
if (!shouldImportMessage(account, folder, message, progress, earliestDate))
{
progress.incrementAndGet();
continue;
}
if (message.getBody() == null)
{
/*
* The provider was unable to get the structure of the message, so
* we'll download a reasonable portion of the message and mark it as
* incomplete so the entire thing can be downloaded later if the user
* wishes to download it.
*/
fp.clear();
fp.add(FetchProfile.Item.BODY_SANE);
/*
* TODO a good optimization here would be to make sure that all Stores set
* the proper size after this fetch and compare the before and after size. If
* they equal we can mark this SYNCHRONIZED instead of PARTIALLY_SYNCHRONIZED
*/
remoteFolder.fetch(new Message[] { message }, fp, null);
// Store the updated message locally
localFolder.appendMessages(new Message[] { message });
Message localMessage = localFolder.getMessage(message.getUid());
// Certain (POP3) servers give you the whole message even when you ask for only the first x Kb
if (!message.isSet(Flag.X_DOWNLOADED_FULL))
{
/*
* Mark the message as fully downloaded if the message size is smaller than
* the account's autodownload size limit, otherwise mark as only a partial
* download. This will prevent the system from downloading the same message
* twice.
*/
if (message.getSize() < account.getMaximumAutoDownloadMessageSize())
{
localMessage.setFlag(Flag.X_DOWNLOADED_FULL, true);
}
else
{
// Set a flag indicating that the message has been partially downloaded and
// is ready for view.
localMessage.setFlag(Flag.X_DOWNLOADED_PARTIAL, true);
}
}
}
else
{
/*
* We have a structure to deal with, from which
* we can pull down the parts we want to actually store.
* Build a list of parts we are interested in. Text parts will be downloaded
* right now, attachments will be left for later.
*/
ArrayList<Part> viewables = new ArrayList<Part>();
ArrayList<Part> attachments = new ArrayList<Part>();
MimeUtility.collectParts(message, viewables, attachments);
/*
* Now download the parts we're interested in storing.
*/
for (Part part : viewables)
{
remoteFolder.fetchPart(message, part, null);
}
// Store the updated message locally
localFolder.appendMessages(new Message[] { message });
Message localMessage = localFolder.getMessage(message.getUid());
// Set a flag indicating this message has been fully downloaded and can be
// viewed.
localMessage.setFlag(Flag.X_DOWNLOADED_PARTIAL, true);
}
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "About to notify listeners that we got a new large message "
+ account + ":" + folder + ":" + message.getUid());
// Update the listener with what we've found
progress.incrementAndGet();
Message localMessage = localFolder.getMessage(message.getUid());
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxAddOrUpdateMessage(account, folder, localMessage);
l.synchronizeMailboxProgress(account, folder, progress.get(), todo);
if (!localMessage.isSet(Flag.SEEN))
{
l.synchronizeMailboxNewMessage(account, folder, localMessage);
}
}
// Send a notification of this message
if (shouldNotifyForMessage(account, message))
{
newMessages.incrementAndGet();
notifyAccount(mApplication, account, message, unreadBeforeStart, newMessages);
}
} // for large messages
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "SYNC: Done fetching large messages for folder " + folder);
}
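/**
* Re-fetches the flags of the remote messages that were not just downloaded
* (only when the remote store supports fetching flags) and applies any changes
* to the local copies, notifying the listeners of updated or removed messages.
*/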
private void refreshLocalMessageFlags(final Account account, final Folder remoteFolder,
final LocalFolder localFolder,
ArrayList<Message> syncFlagMessages,
final AtomicInteger progress,
final int todo
) throws MessagingException
{
final String folder = remoteFolder.getName();
if (remoteFolder.supportsFetchingFlags())
{
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "SYNC: About to sync flags for "
+ syncFlagMessages.size() + " remote messages for folder " + folder);
FetchProfile fp = new FetchProfile();
fp.add(FetchProfile.Item.FLAGS);
List<Message> undeletedMessages = new LinkedList<Message>();
for (Message message : syncFlagMessages)
{
if (!message.isSet(Flag.DELETED))
{
undeletedMessages.add(message);
}
}
remoteFolder.fetch(undeletedMessages.toArray(EMPTY_MESSAGE_ARRAY), fp, null);
for (Message remoteMessage : syncFlagMessages)
{
Message localMessage = localFolder.getMessage(remoteMessage.getUid());
boolean messageChanged = syncFlags(localMessage, remoteMessage);
if (messageChanged)
{
if (localMessage.isSet(Flag.DELETED) || isMessageSuppressed(account, folder, localMessage))
{
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxRemovedMessage(account, folder, localMessage);
}
}
else
{
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxAddOrUpdateMessage(account, folder, localMessage);
}
}
}
progress.incrementAndGet();
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxProgress(account, folder, progress.get(), todo);
}
}
}
}
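/**
* Copies the SEEN, FLAGGED and ANSWERED flags from the remote message to the
* local message; a remote deletion is only applied if the account is set to
* sync remote deletions. Returns true if the local message was changed.
*/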
private boolean syncFlags(Message localMessage, Message remoteMessage) throws MessagingException
{
boolean messageChanged = false;
if (localMessage == null || localMessage.isSet(Flag.DELETED))
{
return false;
}
if (remoteMessage.isSet(Flag.DELETED))
{
if (localMessage.getFolder().getAccount().syncRemoteDeletions())
{
localMessage.setFlag(Flag.DELETED, true);
messageChanged = true;
}
}
else
{
for (Flag flag : new Flag[] { Flag.SEEN, Flag.FLAGGED, Flag.ANSWERED })
{
if (remoteMessage.isSet(flag) != localMessage.isSet(flag))
{
localMessage.setFlag(flag, remoteMessage.isSet(flag));
messageChanged = true;
}
}
}
return messageChanged;
}
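/**
* Walks the cause chain of the given throwable and returns the message of its
* root cause.
*/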
private String getRootCauseMessage(Throwable t)
{
Throwable rootCause = t;
Throwable nextCause = rootCause;
do
{
nextCause = rootCause.getCause();
if (nextCause != null)
{
rootCause = nextCause;
}
}
while (nextCause != null);
return rootCause.getMessage();
}
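/**
* Persists a pending command in the account's LocalStore so that it can be
* replayed against the remote store later by processPendingCommands().
*/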
private void queuePendingCommand(Account account, PendingCommand command)
{
try
{
LocalStore localStore = account.getLocalStore();
localStore.addPendingCommand(command);
}
catch (Exception e)
{
addErrorMessage(account, null, e);
throw new RuntimeException("Unable to enqueue pending command", e);
}
}
private void processPendingCommands(final Account account)
{
putBackground("processPendingCommands", null, new Runnable()
{
public void run()
{
try
{
processPendingCommandsSynchronous(account);
}
catch (UnavailableStorageException e)
{
Log.i(K9.LOG_TAG, "Failed to process pending command because storage is not available - trying again later.");
throw new UnavailableAccountException(e);
}
catch (MessagingException me)
{
Log.e(K9.LOG_TAG, "processPendingCommands", me);
addErrorMessage(account, null, me);
/*
* Ignore any exceptions from the commands. Commands will be processed
* on the next round.
*/
}
}
});
}
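/**
* Runs all pending commands stored for the account, in order. A command that
* fails permanently is removed from the queue and processing continues; any
* other failure is rethrown so the remaining commands can be retried later.
*/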
private void processPendingCommandsSynchronous(Account account) throws MessagingException
{
LocalStore localStore = account.getLocalStore();
ArrayList<PendingCommand> commands = localStore.getPendingCommands();
int progress = 0;
int todo = commands.size();
if (todo == 0)
{
return;
}
for (MessagingListener l : getListeners())
{
l.pendingCommandsProcessing(account);
l.synchronizeMailboxProgress(account, null, progress, todo);
}
PendingCommand processingCommand = null;
try
{
for (PendingCommand command : commands)
{
processingCommand = command;
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "Processing pending command '" + command + "'");
String[] components = command.command.split("\\.");
String commandTitle = components[components.length - 1];
for (MessagingListener l : getListeners())
{
l.pendingCommandStarted(account, commandTitle);
}
/*
* We specifically do not catch any exceptions here. If a command fails it is
* most likely due to a server or IO error and it must be retried before any
* other command processes. This maintains the order of the commands.
*/
try
{
if (PENDING_COMMAND_APPEND.equals(command.command))
{
processPendingAppend(command, account);
}
else if (PENDING_COMMAND_SET_FLAG_BULK.equals(command.command))
{
processPendingSetFlag(command, account);
}
else if (PENDING_COMMAND_SET_FLAG.equals(command.command))
{
processPendingSetFlagOld(command, account);
}
else if (PENDING_COMMAND_MARK_ALL_AS_READ.equals(command.command))
{
processPendingMarkAllAsRead(command, account);
}
else if (PENDING_COMMAND_MOVE_OR_COPY_BULK.equals(command.command))
{
processPendingMoveOrCopy(command, account);
}
else if (PENDING_COMMAND_MOVE_OR_COPY.equals(command.command))
{
processPendingMoveOrCopyOld(command, account);
}
else if (PENDING_COMMAND_EMPTY_TRASH.equals(command.command))
{
processPendingEmptyTrash(command, account);
}
else if (PENDING_COMMAND_EXPUNGE.equals(command.command))
{
processPendingExpunge(command, account);
}
localStore.removePendingCommand(command);
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "Done processing pending command '" + command + "'");
}
catch (MessagingException me)
{
if (me.isPermanentFailure())
{
addErrorMessage(account, null, me);
Log.e(K9.LOG_TAG, "Failure of command '" + command + "' was permanent, removing command from queue");
localStore.removePendingCommand(processingCommand);
}
else
{
throw me;
}
}
finally
{
progress++;
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxProgress(account, null, progress, todo);
l.pendingCommandCompleted(account, commandTitle);
}
}
}
}
catch (MessagingException me)
{
addErrorMessage(account, null, me);
Log.e(K9.LOG_TAG, "Could not process command '" + processingCommand + "'", me);
throw me;
}
finally
{
for (MessagingListener l : getListeners())
{
l.pendingCommandsFinished(account);
}
}
}
/**
* Process a pending append message command. This command uploads a local message to the
* server, first checking to be sure that the server message is not newer than
* the local message. Once the local message is successfully processed it is deleted so
* that the server message will be synchronized down without an additional copy being
* created.
* TODO update the local message UID instead of deleting it
*
* @param command arguments = (String folder, String uid)
* @param account
* @throws MessagingException
*/
private void processPendingAppend(PendingCommand command, Account account)
throws MessagingException
{
Folder remoteFolder = null;
LocalFolder localFolder = null;
try
{
String folder = command.arguments[0];
String uid = command.arguments[1];
if (account.getErrorFolderName().equals(folder))
{
return;
}
LocalStore localStore = account.getLocalStore();
localFolder = localStore.getFolder(folder);
LocalMessage localMessage = (LocalMessage) localFolder.getMessage(uid);
if (localMessage == null)
{
return;
}
Store remoteStore = account.getRemoteStore();
remoteFolder = remoteStore.getFolder(folder);
if (!remoteFolder.exists())
{
if (!remoteFolder.create(FolderType.HOLDS_MESSAGES))
{
return;
}
}
remoteFolder.open(OpenMode.READ_WRITE);
if (remoteFolder.getMode() != OpenMode.READ_WRITE)
{
return;
}
Message remoteMessage = null;
if (!localMessage.getUid().startsWith(K9.LOCAL_UID_PREFIX))
{
remoteMessage = remoteFolder.getMessage(localMessage.getUid());
}
if (remoteMessage == null)
{
if (localMessage.isSet(Flag.X_REMOTE_COPY_STARTED))
{
Log.w(K9.LOG_TAG, "Local message with uid " + localMessage.getUid() +
" has flag " + Flag.X_REMOTE_COPY_STARTED + " already set, checking for remote message with " +
" same message id");
String rUid = remoteFolder.getUidFromMessageId(localMessage);
if (rUid != null)
{
Log.w(K9.LOG_TAG, "Local message has flag " + Flag.X_REMOTE_COPY_STARTED + " already set, and there is a remote message with " +
" uid " + rUid + ", assuming message was already copied and aborting this copy");
String oldUid = localMessage.getUid();
localMessage.setUid(rUid);
localFolder.changeUid(localMessage);
for (MessagingListener l : getListeners())
{
l.messageUidChanged(account, folder, oldUid, localMessage.getUid());
}
return;
}
else
{
Log.w(K9.LOG_TAG, "No remote message with message-id found, proceeding with append");
}
}
/*
* If the message does not exist remotely we just upload it and then
* update our local copy with the new uid.
*/
FetchProfile fp = new FetchProfile();
fp.add(FetchProfile.Item.BODY);
localFolder.fetch(new Message[]
{
localMessage
}
, fp, null);
String oldUid = localMessage.getUid();
localMessage.setFlag(Flag.X_REMOTE_COPY_STARTED, true);
remoteFolder.appendMessages(new Message[] { localMessage });
localFolder.changeUid(localMessage);
for (MessagingListener l : getListeners())
{
l.messageUidChanged(account, folder, oldUid, localMessage.getUid());
}
}
else
{
/*
* If the remote message exists we need to determine which copy to keep.
*/
/*
* See if the remote message is newer than ours.
*/
FetchProfile fp = new FetchProfile();
fp.add(FetchProfile.Item.ENVELOPE);
remoteFolder.fetch(new Message[] { remoteMessage }, fp, null);
Date localDate = localMessage.getInternalDate();
Date remoteDate = remoteMessage.getInternalDate();
if (remoteDate != null && remoteDate.compareTo(localDate) > 0)
{
/*
* If the remote message is newer than ours we'll just
* delete ours and move on. A sync will get the server message
* if we need to be able to see it.
*/
localMessage.destroy();
}
else
{
/*
* Otherwise we'll upload our message and then delete the remote message.
*/
fp = new FetchProfile();
fp.add(FetchProfile.Item.BODY);
localFolder.fetch(new Message[] { localMessage }, fp, null);
String oldUid = localMessage.getUid();
localMessage.setFlag(Flag.X_REMOTE_COPY_STARTED, true);
remoteFolder.appendMessages(new Message[] { localMessage });
localFolder.changeUid(localMessage);
for (MessagingListener l : getListeners())
{
l.messageUidChanged(account, folder, oldUid, localMessage.getUid());
}
if (remoteDate != null)
{
remoteMessage.setFlag(Flag.DELETED, true);
if (Account.EXPUNGE_IMMEDIATELY.equals(account.getExpungePolicy()))
{
remoteFolder.expunge();
}
}
}
}
}
finally
{
if (remoteFolder != null)
{
remoteFolder.close();
}
if (localFolder != null)
{
localFolder.close();
}
}
}
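/**
 * Queues a bulk move/copy command for later execution by processPendingMoveOrCopy().
 * The command arguments are laid out as
 * { srcFolder, destFolder, String.valueOf(isCopy), uid1, uid2, ... }.
 */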
private void queueMoveOrCopy(Account account, String srcFolder, String destFolder, boolean isCopy, String uids[])
{
if (account.getErrorFolderName().equals(srcFolder))
{
return;
}
PendingCommand command = new PendingCommand();
command.command = PENDING_COMMAND_MOVE_OR_COPY_BULK;
int length = 3 + uids.length;
command.arguments = new String[length];
command.arguments[0] = srcFolder;
command.arguments[1] = destFolder;
command.arguments[2] = Boolean.toString(isCopy);
for (int i = 0; i < uids.length; i++)
{
command.arguments[3 + i] = uids[i];
}
queuePendingCommand(account, command);
}
/**
 * Processes a pending bulk move or copy command.
 *
 * @param command arguments = (String srcFolder, String destFolder, String isCopy, String uid...)
 * @param account
 * @throws MessagingException
 */
private void processPendingMoveOrCopy(PendingCommand command, Account account)
throws MessagingException
{
Folder remoteSrcFolder = null;
Folder remoteDestFolder = null;
try
{
String srcFolder = command.arguments[0];
if (account.getErrorFolderName().equals(srcFolder))
{
return;
}
String destFolder = command.arguments[1];
String isCopyS = command.arguments[2];
Store remoteStore = account.getRemoteStore();
remoteSrcFolder = remoteStore.getFolder(srcFolder);
List<Message> messages = new ArrayList<Message>();
for (int i = 3; i < command.arguments.length; i++)
{
String uid = command.arguments[i];
if (!uid.startsWith(K9.LOCAL_UID_PREFIX))
{
messages.add(remoteSrcFolder.getMessage(uid));
}
}
boolean isCopy = false;
if (isCopyS != null)
{
isCopy = Boolean.parseBoolean(isCopyS);
}
if (!remoteSrcFolder.exists())
{
throw new MessagingException("processingPendingMoveOrCopy: remoteFolder " + srcFolder + " does not exist", true);
}
remoteSrcFolder.open(OpenMode.READ_WRITE);
if (remoteSrcFolder.getMode() != OpenMode.READ_WRITE)
{
throw new MessagingException("processingPendingMoveOrCopy: could not open remoteSrcFolder " + srcFolder + " read/write", true);
}
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "processingPendingMoveOrCopy: source folder = " + srcFolder
+ ", " + messages.size() + " messages, destination folder = " + destFolder + ", isCopy = " + isCopy);
if (!isCopy && destFolder.equals(account.getTrashFolderName()))
{
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "processingPendingMoveOrCopy doing special case for deleting message");
String destFolderName = destFolder;
if (K9.FOLDER_NONE.equals(destFolderName))
{
destFolderName = null;
}
remoteSrcFolder.delete(messages.toArray(EMPTY_MESSAGE_ARRAY), destFolderName);
}
else
{
remoteDestFolder = remoteStore.getFolder(destFolder);
if (isCopy)
{
remoteSrcFolder.copyMessages(messages.toArray(EMPTY_MESSAGE_ARRAY), remoteDestFolder);
}
else
{
remoteSrcFolder.moveMessages(messages.toArray(EMPTY_MESSAGE_ARRAY), remoteDestFolder);
}
}
if (!isCopy && Account.EXPUNGE_IMMEDIATELY.equals(account.getExpungePolicy()))
{
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "processingPendingMoveOrCopy expunging folder " + account.getDescription() + ":" + srcFolder);
remoteSrcFolder.expunge();
}
}
finally
{
if (remoteSrcFolder != null)
{
remoteSrcFolder.close();
}
if (remoteDestFolder != null)
{
remoteDestFolder.close();
}
}
}
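/**
 * Queues a bulk set-flag command and immediately triggers pending command processing in
 * the background. The command arguments are laid out as
 * { folderName, newState, flag, uid1, uid2, ... }. Illustrative call (values are examples only):
 *   queueSetFlag(account, "INBOX", Boolean.toString(true), Flag.SEEN.toString(), uids);
 */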
private void queueSetFlag(final Account account, final String folderName, final String newState, final String flag, final String[] uids)
{
putBackground("queueSetFlag " + account.getDescription() + ":" + folderName, null, new Runnable()
{
public void run()
{
PendingCommand command = new PendingCommand();
command.command = PENDING_COMMAND_SET_FLAG_BULK;
int length = 3 + uids.length;
command.arguments = new String[length];
command.arguments[0] = folderName;
command.arguments[1] = newState;
command.arguments[2] = flag;
for (int i = 0; i < uids.length; i++)
{
command.arguments[3 + i] = uids[i];
}
queuePendingCommand(account, command);
processPendingCommands(account);
}
});
}
/**
 * Processes a pending bulk set-flag command, setting or clearing the given flag on a
 * list of remote messages.
 *
 * @param command arguments = (String folder, String newState, String flag, String uid...)
 * @param account
 */
private void processPendingSetFlag(PendingCommand command, Account account)
throws MessagingException
{
String folder = command.arguments[0];
if (account.getErrorFolderName().equals(folder))
{
return;
}
boolean newState = Boolean.parseBoolean(command.arguments[1]);
Flag flag = Flag.valueOf(command.arguments[2]);
Store remoteStore = account.getRemoteStore();
Folder remoteFolder = remoteStore.getFolder(folder);
if (!remoteFolder.exists() ||
/*
* Don't proceed if the remote folder doesn't support flags and
* the flag to be changed isn't the deleted flag. This avoids
* unnecessary connections to POP3 servers.
*/
// TODO: This should actually call a supportsSettingFlag(flag) method.
(!remoteFolder.supportsFetchingFlags() && !Flag.DELETED.equals(flag)))
{
return;
}
try
{
remoteFolder.open(OpenMode.READ_WRITE);
if (remoteFolder.getMode() != OpenMode.READ_WRITE)
{
return;
}
List<Message> messages = new ArrayList<Message>();
for (int i = 3; i < command.arguments.length; i++)
{
String uid = command.arguments[i];
if (!uid.startsWith(K9.LOCAL_UID_PREFIX))
{
messages.add(remoteFolder.getMessage(uid));
}
}
if (messages.size() == 0)
{
return;
}
remoteFolder.setFlags(messages.toArray(EMPTY_MESSAGE_ARRAY), new Flag[] { flag }, newState);
}
finally
{
if (remoteFolder != null)
{
remoteFolder.close();
}
}
}
// TODO: This method is obsolete and is only for transition from K-9 2.0 to K-9 2.1
// Eventually, it should be removed
private void processPendingSetFlagOld(PendingCommand command, Account account)
throws MessagingException
{
String folder = command.arguments[0];
String uid = command.arguments[1];
if (account.getErrorFolderName().equals(folder))
{
return;
}
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "processPendingSetFlagOld: folder = " + folder + ", uid = " + uid);
boolean newState = Boolean.parseBoolean(command.arguments[2]);
Flag flag = Flag.valueOf(command.arguments[3]);
Folder remoteFolder = null;
try
{
Store remoteStore = account.getRemoteStore();
remoteFolder = remoteStore.getFolder(folder);
if (!remoteFolder.exists())
{
return;
}
remoteFolder.open(OpenMode.READ_WRITE);
if (remoteFolder.getMode() != OpenMode.READ_WRITE)
{
return;
}
Message remoteMessage = null;
if (!uid.startsWith(K9.LOCAL_UID_PREFIX))
{
remoteMessage = remoteFolder.getMessage(uid);
}
if (remoteMessage == null)
{
return;
}
remoteMessage.setFlag(flag, newState);
}
finally
{
if (remoteFolder != null)
{
remoteFolder.close();
}
}
}
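/**
 * Queues an expunge command for the given folder and triggers pending command processing
 * in the background.
 */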
private void queueExpunge(final Account account, final String folderName)
{
putBackground("queueExpunge " + account.getDescription() + ":" + folderName, null, new Runnable()
{
public void run()
{
PendingCommand command = new PendingCommand();
command.command = PENDING_COMMAND_EXPUNGE;
command.arguments = new String[1];
command.arguments[0] = folderName;
queuePendingCommand(account, command);
processPendingCommands(account);
}
});
}
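/**
 * Processes a pending expunge command by expunging the remote folder named in the command
 * arguments. Quietly skips the error folder, folders that do not exist, and folders that
 * cannot be opened read/write.
 *
 * @param command arguments = (String folder)
 * @param account
 * @throws MessagingException
 */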
private void processPendingExpunge(PendingCommand command, Account account)
throws MessagingException
{
String folder = command.arguments[0];
if (account.getErrorFolderName().equals(folder))
{
return;
}
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "processPendingExpunge: folder = " + folder);
Store remoteStore = account.getRemoteStore();
Folder remoteFolder = remoteStore.getFolder(folder);
try
{
if (!remoteFolder.exists())
{
return;
}
remoteFolder.open(OpenMode.READ_WRITE);
if (remoteFolder.getMode() != OpenMode.READ_WRITE)
{
return;
}
remoteFolder.expunge();
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "processPendingExpunge: complete for folder = " + folder);
}
finally
{
if (remoteFolder != null)
{
remoteFolder.close();
}
}
}
// TODO: This method is obsolete and is only for transition from K-9 2.0 to K-9 2.1
// Eventually, it should be removed
private void processPendingMoveOrCopyOld(PendingCommand command, Account account)
throws MessagingException
{
String srcFolder = command.arguments[0];
String uid = command.arguments[1];
String destFolder = command.arguments[2];
String isCopyS = command.arguments[3];
boolean isCopy = false;
if (isCopyS != null)
{
isCopy = Boolean.parseBoolean(isCopyS);
}
if (account.getErrorFolderName().equals(srcFolder))
{
return;
}
Store remoteStore = account.getRemoteStore();
Folder remoteSrcFolder = remoteStore.getFolder(srcFolder);
Folder remoteDestFolder = remoteStore.getFolder(destFolder);
if (!remoteSrcFolder.exists())
{
throw new MessagingException("processPendingMoveOrCopyOld: remoteFolder " + srcFolder + " does not exist", true);
}
remoteSrcFolder.open(OpenMode.READ_WRITE);
if (remoteSrcFolder.getMode() != OpenMode.READ_WRITE)
{
throw new MessagingException("processPendingMoveOrCopyOld: could not open remoteSrcFolder " + srcFolder + " read/write", true);
}
Message remoteMessage = null;
if (!uid.startsWith(K9.LOCAL_UID_PREFIX))
{
remoteMessage = remoteSrcFolder.getMessage(uid);
}
if (remoteMessage == null)
{
throw new MessagingException("processPendingMoveOrCopyOld: remoteMessage " + uid + " does not exist", true);
}
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "processPendingMoveOrCopyOld: source folder = " + srcFolder
+ ", uid = " + uid + ", destination folder = " + destFolder + ", isCopy = " + isCopy);
if (!isCopy && destFolder.equals(account.getTrashFolderName()))
{
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "processPendingMoveOrCopyOld doing special case for deleting message");
remoteMessage.delete(account.getTrashFolderName());
remoteSrcFolder.close();
return;
}
remoteDestFolder.open(OpenMode.READ_WRITE);
if (remoteDestFolder.getMode() != OpenMode.READ_WRITE)
{
throw new MessagingException("processPendingMoveOrCopyOld: could not open remoteDestFolder " + srcFolder + " read/write", true);
}
if (isCopy)
{
remoteSrcFolder.copyMessages(new Message[] { remoteMessage }, remoteDestFolder);
}
else
{
remoteSrcFolder.moveMessages(new Message[] { remoteMessage }, remoteDestFolder);
}
remoteSrcFolder.close();
remoteDestFolder.close();
}
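/**
 * Processes a pending mark-all-as-read command: flags every unseen message in the local
 * folder as seen, resets the local unread count, and then mirrors the change to the remote
 * folder if it exists and supports the operation.
 *
 * @param command arguments = (String folder)
 * @param account
 * @throws MessagingException
 */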
private void processPendingMarkAllAsRead(PendingCommand command, Account account) throws MessagingException
{
String folder = command.arguments[0];
Folder remoteFolder = null;
LocalFolder localFolder = null;
try
{
Store localStore = account.getLocalStore();
localFolder = (LocalFolder) localStore.getFolder(folder);
localFolder.open(OpenMode.READ_WRITE);
Message[] messages = localFolder.getMessages(null, false);
for (Message message : messages)
{
if (!message.isSet(Flag.SEEN))
{
message.setFlag(Flag.SEEN, true);
for (MessagingListener l : getListeners())
{
l.listLocalMessagesUpdateMessage(account, folder, message);
}
}
}
localFolder.setUnreadMessageCount(0);
for (MessagingListener l : getListeners())
{
l.folderStatusChanged(account, folder, 0);
}
if (account.getErrorFolderName().equals(folder))
{
return;
}
Store remoteStore = account.getRemoteStore();
remoteFolder = remoteStore.getFolder(folder);
if (!remoteFolder.exists())
{
return;
}
remoteFolder.open(OpenMode.READ_WRITE);
if (remoteFolder.getMode() != OpenMode.READ_WRITE)
{
return;
}
remoteFolder.setFlags(new Flag[] {Flag.SEEN}, true);
remoteFolder.close();
}
catch (UnsupportedOperationException uoe)
{
Log.w(K9.LOG_TAG, "Could not mark all server-side as read because store doesn't support operation", uoe);
}
finally
{
if (localFolder != null)
{
localFolder.close();
}
if (remoteFolder != null)
{
remoteFolder.close();
}
}
}
static long uidfill = 0;
static AtomicBoolean loopCatch = new AtomicBoolean();
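/**
 * Writes the stack trace of the given throwable into the account's error folder as a new
 * message. Guarded by loopCatch so that a failure while reporting an error cannot recurse
 * into another error report.
 */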
public void addErrorMessage(Account account, String subject, Throwable t)
{
if (!loopCatch.compareAndSet(false, true))
{
return;
}
try
{
if (t == null)
{
return;
}
CharArrayWriter baos = new CharArrayWriter(t.getStackTrace().length * 10);
PrintWriter ps = new PrintWriter(baos);
t.printStackTrace(ps);
ps.close();
if (subject == null)
{
subject = getRootCauseMessage(t);
}
addErrorMessage(account, subject, baos.toString());
}
catch (Throwable it)
{
Log.e(K9.LOG_TAG, "Could not save error message to " + account.getErrorFolderName(), it);
}
finally
{
loopCatch.set(false);
}
}
public void addErrorMessage(Account account, String subject, String body)
{
if (!K9.ENABLE_ERROR_FOLDER)
{
return;
}
if (!loopCatch.compareAndSet(false, true))
{
return;
}
try
{
if (body == null || body.length() < 1)
{
return;
}
Store localStore = account.getLocalStore();
LocalFolder localFolder = (LocalFolder)localStore.getFolder(account.getErrorFolderName());
Message[] messages = new Message[1];
MimeMessage message = new MimeMessage();
message.setBody(new TextBody(body));
message.setFlag(Flag.X_DOWNLOADED_FULL, true);
message.setSubject(subject);
long nowTime = System.currentTimeMillis();
Date nowDate = new Date(nowTime);
message.setInternalDate(nowDate);
message.addSentDate(nowDate);
message.setFrom(new Address(account.getEmail(), "K9mail internal"));
messages[0] = message;
localFolder.appendMessages(messages);
localFolder.clearMessagesOlderThan(nowTime - (15 * 60 * 1000));
}
catch (Throwable it)
{
Log.e(K9.LOG_TAG, "Could not save error message to " + account.getErrorFolderName(), it);
}
finally
{
loopCatch.set(false);
}
}
public void markAllMessagesRead(final Account account, final String folder)
{
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Marking all messages in " + account.getDescription() + ":" + folder + " as read");
List<String> args = new ArrayList<String>();
args.add(folder);
PendingCommand command = new PendingCommand();
command.command = PENDING_COMMAND_MARK_ALL_AS_READ;
command.arguments = args.toArray(EMPTY_STRING_ARRAY);
queuePendingCommand(account, command);
processPendingCommands(account);
}
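/**
 * Sets or removes a flag on the given messages, grouping them by account and folder before
 * delegating to setFlag(Account, String, String[], Flag, boolean).
 */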
public void setFlag(
final Message[] messages,
final Flag flag,
final boolean newState)
{
actOnMessages(messages, new MessageActor()
{
@Override
public void act(final Account account, final Folder folder,
final List<Message> messages)
{
String[] uids = new String[messages.size()];
for (int i = 0; i < messages.size(); i++)
{
uids[i] = messages.get(i).getUid();
}
setFlag(account, folder.getName(), uids, flag, newState);
}
});
}
public void setFlag(
final Account account,
final String folderName,
final String[] uids,
final Flag flag,
final boolean newState)
{
// TODO: put this into the background, but right now that causes odd behavior
// because the FolderMessageList doesn't have its own cache of the flag states
Folder localFolder = null;
try
{
Store localStore = account.getLocalStore();
localFolder = localStore.getFolder(folderName);
localFolder.open(OpenMode.READ_WRITE);
ArrayList<Message> messages = new ArrayList<Message>();
for (String uid : uids)
{
// Un-flagging a message in the Outbox resets its send count so it can be sent again
if (flag == Flag.FLAGGED && !newState
&& uid != null
&& account.getOutboxFolderName().equals(folderName))
{
sendCount.remove(uid);
}
Message msg = localFolder.getMessage(uid);
if (msg != null)
{
messages.add(msg);
}
}
localFolder.setFlags(messages.toArray(EMPTY_MESSAGE_ARRAY), new Flag[] {flag}, newState);
for (MessagingListener l : getListeners())
{
l.folderStatusChanged(account, folderName, localFolder.getUnreadMessageCount());
}
if (account.getErrorFolderName().equals(folderName))
{
return;
}
queueSetFlag(account, folderName, Boolean.toString(newState), flag.toString(), uids);
processPendingCommands(account);
}
catch (MessagingException me)
{
addErrorMessage(account, null, me);
throw new RuntimeException(me);
}
finally
{
if (localFolder != null)
{
localFolder.close();
}
}
}//setFlag
public void clearAllPending(final Account account)
{
try
{
Log.w(K9.LOG_TAG, "Clearing pending commands!");
LocalStore localStore = account.getLocalStore();
localStore.removePendingCommands();
}
catch (MessagingException me)
{
Log.e(K9.LOG_TAG, "Unable to clear pending command", me);
addErrorMessage(account, null, me);
}
}
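/**
 * Ensures the full body of a message is available locally, downloading it from the remote
 * folder if it has not been fully synchronized yet, and then reports the headers and body
 * to the listeners.
 */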
public void loadMessageForViewRemote(final Account account, final String folder,
final String uid, final MessagingListener listener)
{
put("loadMessageForViewRemote", listener, new Runnable()
{
public void run()
{
Folder remoteFolder = null;
LocalFolder localFolder = null;
try
{
LocalStore localStore = account.getLocalStore();
localFolder = localStore.getFolder(folder);
localFolder.open(OpenMode.READ_WRITE);
Message message = localFolder.getMessage(uid);
if (message.isSet(Flag.X_DOWNLOADED_FULL))
{
/*
* If the message has been synchronized since we were called we'll
* just hand it back because it's ready to go.
*/
FetchProfile fp = new FetchProfile();
fp.add(FetchProfile.Item.ENVELOPE);
fp.add(FetchProfile.Item.BODY);
localFolder.fetch(new Message[] { message }, fp, null);
}
else
{
/*
* At this point the message is not available, so we need to download it
* fully if possible.
*/
Store remoteStore = account.getRemoteStore();
remoteFolder = remoteStore.getFolder(folder);
remoteFolder.open(OpenMode.READ_WRITE);
// Get the remote message and fully download it
Message remoteMessage = remoteFolder.getMessage(uid);
FetchProfile fp = new FetchProfile();
fp.add(FetchProfile.Item.BODY);
remoteFolder.fetch(new Message[] { remoteMessage }, fp, null);
// Store the message locally and load the stored message into memory
localFolder.appendMessages(new Message[] { remoteMessage });
fp.add(FetchProfile.Item.ENVELOPE);
message = localFolder.getMessage(uid);
localFolder.fetch(new Message[] { message }, fp, null);
// Mark that this message is now fully synched
message.setFlag(Flag.X_DOWNLOADED_FULL, true);
}
// now that we have the full message, refresh the headers
for (MessagingListener l : getListeners(listener))
{
l.loadMessageForViewHeadersAvailable(account, folder, uid, message);
}
for (MessagingListener l : getListeners(listener))
{
l.loadMessageForViewBodyAvailable(account, folder, uid, message);
}
for (MessagingListener l : getListeners(listener))
{
l.loadMessageForViewFinished(account, folder, uid, message);
}
}
catch (Exception e)
{
for (MessagingListener l : getListeners(listener))
{
l.loadMessageForViewFailed(account, folder, uid, e);
}
addErrorMessage(account, null, e);
}
finally
{
if (remoteFolder!=null)
{
remoteFolder.close();
}
if (localFolder!=null)
{
localFolder.close();
}
}//finally
}//run
});
}
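/**
 * Loads a message from the local store for display, marking it as seen and notifying the
 * listeners as the headers and body become available. Use loadMessageForViewRemote() when
 * the full body may still need to be downloaded from the server.
 */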
public void loadMessageForView(final Account account, final String folder, final String uid,
final MessagingListener listener)
{
for (MessagingListener l : getListeners(listener))
{
l.loadMessageForViewStarted(account, folder, uid);
}
threadPool.execute(new Runnable()
{
public void run()
{
try
{
LocalStore localStore = account.getLocalStore();
LocalFolder localFolder = localStore.getFolder(folder);
localFolder.open(OpenMode.READ_WRITE);
LocalMessage message = (LocalMessage)localFolder.getMessage(uid);
if (message==null
|| message.getId()==0)
{
throw new IllegalArgumentException("Message not found: folder=" + folder + ", uid=" + uid);
}
if (!message.isSet(Flag.SEEN))
{
message.setFlag(Flag.SEEN, true);
setFlag(new Message[] { message }, Flag.SEEN, true);
}
for (MessagingListener l : getListeners(listener))
{
l.loadMessageForViewHeadersAvailable(account, folder, uid, message);
}
FetchProfile fp = new FetchProfile();
fp.add(FetchProfile.Item.ENVELOPE);
fp.add(FetchProfile.Item.BODY);
localFolder.fetch(new Message[]
{
message
}, fp, null);
localFolder.close();
for (MessagingListener l : getListeners(listener))
{
l.loadMessageForViewBodyAvailable(account, folder, uid, message);
}
for (MessagingListener l : getListeners(listener))
{
l.loadMessageForViewFinished(account, folder, uid, message);
}
}
catch (Exception e)
{
for (MessagingListener l : getListeners(listener))
{
l.loadMessageForViewFailed(account, folder, uid, e);
}
addErrorMessage(account, null, e);
}
}
});
}
/**
 * Attempts to load the attachment specified by part from the given account and message.
 * @param account
 * @param message
 * @param part
 * @param tag
 * @param listener
 */
public void loadAttachment(
final Account account,
final Message message,
final Part part,
final Object tag,
final MessagingListener listener)
{
/*
* Check if the attachment has already been downloaded. If it has there's no reason to
* download it, so we just tell the listener that it's ready to go.
*/
try
{
if (part.getBody() != null)
{
for (MessagingListener l : getListeners())
{
l.loadAttachmentStarted(account, message, part, tag, false);
}
if (listener != null)
{
listener.loadAttachmentStarted(account, message, part, tag, false);
}
for (MessagingListener l : getListeners())
{
l.loadAttachmentFinished(account, message, part, tag);
}
if (listener != null)
{
listener.loadAttachmentFinished(account, message, part, tag);
}
return;
}
}
catch (MessagingException me)
{
/*
* If the header isn't there the attachment isn't downloaded yet, so just continue
* on.
*/
}
for (MessagingListener l : getListeners())
{
l.loadAttachmentStarted(account, message, part, tag, true);
}
if (listener != null)
{
// The attachment still needs to be downloaded, so report requiresDownload = true here
// as well, matching the notification sent to the registered listeners above.
listener.loadAttachmentStarted(account, message, part, tag, true);
}
put("loadAttachment", listener, new Runnable()
{
public void run()
{
Folder remoteFolder = null;
LocalFolder localFolder = null;
try
{
LocalStore localStore = account.getLocalStore();
/*
* We clear out any attachments already cached in the entire store and then
* we update the passed in message to reflect that there are no cached
* attachments. This is in support of limiting the account to having one
* attachment downloaded at a time.
*/
localStore.pruneCachedAttachments();
ArrayList<Part> viewables = new ArrayList<Part>();
ArrayList<Part> attachments = new ArrayList<Part>();
MimeUtility.collectParts(message, viewables, attachments);
for (Part attachment : attachments)
{
attachment.setBody(null);
}
Store remoteStore = account.getRemoteStore();
localFolder = localStore.getFolder(message.getFolder().getName());
remoteFolder = remoteStore.getFolder(message.getFolder().getName());
remoteFolder.open(OpenMode.READ_WRITE);
//FIXME: This is an ugly hack that won't be needed once the Message objects have been united.
Message remoteMessage = remoteFolder.getMessage(message.getUid());
remoteMessage.setBody(message.getBody());
remoteFolder.fetchPart(remoteMessage, part, null);
localFolder.updateMessage((LocalMessage)message);
for (MessagingListener l : getListeners())
{
l.loadAttachmentFinished(account, message, part, tag);
}
if (listener != null)
{
listener.loadAttachmentFinished(account, message, part, tag);
}
}
catch (MessagingException me)
{
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Exception loading attachment", me);
for (MessagingListener l : getListeners())
{
l.loadAttachmentFailed(account, message, part, tag, me.getMessage());
}
if (listener != null)
{
listener.loadAttachmentFailed(account, message, part, tag, me.getMessage());
}
addErrorMessage(account, null, me);
}
finally
{
if (remoteFolder != null)
{
remoteFolder.close();
}
if (localFolder != null)
{
localFolder.close();
}
}
}
});
}
/**
* Stores the given message in the Outbox and starts a sendPendingMessages command to
* attempt to send the message.
* @param account
* @param message
* @param listener
*/
public void sendMessage(final Account account,
final Message message,
MessagingListener listener)
{
try
{
LocalStore localStore = account.getLocalStore();
LocalFolder localFolder = localStore.getFolder(account.getOutboxFolderName());
localFolder.open(OpenMode.READ_WRITE);
localFolder.appendMessages(new Message[]
{
message
});
Message localMessage = localFolder.getMessage(message.getUid());
localMessage.setFlag(Flag.X_DOWNLOADED_FULL, true);
localFolder.close();
sendPendingMessages(account, listener);
}
catch (Exception e)
{
/*
for (MessagingListener l : getListeners())
{
// TODO general failed
}
*/
addErrorMessage(account, null, e);
}
}
public void sendPendingMessages(MessagingListener listener)
{
final Preferences prefs = Preferences.getPreferences(mApplication.getApplicationContext());
Account[] accounts = prefs.getAccounts();
for (Account account : accounts)
{
sendPendingMessages(account, listener);
}
}
/**
* Attempt to send any messages that are sitting in the Outbox.
* @param account
* @param listener
*/
public void sendPendingMessages(final Account account,
MessagingListener listener)
{
putBackground("sendPendingMessages", listener, new Runnable()
{
public void run()
{
if (!account.isAvailable(mApplication))
{
throw new UnavailableAccountException();
}
if (messagesPendingSend(account))
{
NotificationManager notifMgr =
(NotificationManager)mApplication.getSystemService(Context.NOTIFICATION_SERVICE);
if (account.isShowOngoing())
{
Notification notif = new Notification(R.drawable.ic_menu_refresh,
mApplication.getString(R.string.notification_bg_send_ticker, account.getDescription()), System.currentTimeMillis());
Intent intent = MessageList.actionHandleFolderIntent(mApplication, account, K9.INBOX);
PendingIntent pi = PendingIntent.getActivity(mApplication, 0, intent, 0);
notif.setLatestEventInfo(mApplication, mApplication.getString(R.string.notification_bg_send_title),
account.getDescription() , pi);
notif.flags = Notification.FLAG_ONGOING_EVENT;
if (K9.NOTIFICATION_LED_WHILE_SYNCING)
{
notif.flags |= Notification.FLAG_SHOW_LIGHTS;
notif.ledARGB = account.getNotificationSetting().getLedColor();
notif.ledOnMS = K9.NOTIFICATION_LED_FAST_ON_TIME;
notif.ledOffMS = K9.NOTIFICATION_LED_FAST_OFF_TIME;
}
notifMgr.notify(K9.FETCHING_EMAIL_NOTIFICATION - account.getAccountNumber(), notif);
}
try
{
sendPendingMessagesSynchronous(account);
}
finally
{
if (account.isShowOngoing())
{
notifMgr.cancel(K9.FETCHING_EMAIL_NOTIFICATION - account.getAccountNumber());
}
}
}
}
});
}
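/**
 * Returns true if the account's Outbox folder exists and contains at least one message.
 */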
public boolean messagesPendingSend(final Account account)
{
Folder localFolder = null;
try
{
localFolder = account.getLocalStore().getFolder(
account.getOutboxFolderName());
if (!localFolder.exists())
{
return false;
}
localFolder.open(OpenMode.READ_WRITE);
if (localFolder.getMessageCount() > 0)
{
return true;
}
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Exception while checking for unsent messages", e);
}
finally
{
if (localFolder != null)
{
localFolder.close();
}
}
return false;
}
/**
 * Attempt to send any messages that are sitting in the Outbox, synchronously on the
 * calling thread.
 * @param account
 */
public void sendPendingMessagesSynchronous(final Account account)
{
Folder localFolder = null;
try
{
Store localStore = account.getLocalStore();
localFolder = localStore.getFolder(
account.getOutboxFolderName());
if (!localFolder.exists())
{
return;
}
for (MessagingListener l : getListeners())
{
l.sendPendingMessagesStarted(account);
}
localFolder.open(OpenMode.READ_WRITE);
Message[] localMessages = localFolder.getMessages(null);
Exception lastFailure = null;
int progress = 0;
int todo = localMessages.length;
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxProgress(account, account.getSentFolderName(), progress, todo);
}
/*
* The profile we will use to pull all of the content
* for a given local message into memory for sending.
*/
FetchProfile fp = new FetchProfile();
fp.add(FetchProfile.Item.ENVELOPE);
fp.add(FetchProfile.Item.BODY);
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Scanning folder '" + account.getOutboxFolderName() + "' (" + ((LocalFolder)localFolder).getId() + ") for messages to send");
Transport transport = Transport.getInstance(account);
for (Message message : localMessages)
{
if (message.isSet(Flag.DELETED))
{
message.destroy();
continue;
}
try
{
AtomicInteger count = new AtomicInteger(0);
AtomicInteger oldCount = sendCount.putIfAbsent(message.getUid(), count);
if (oldCount != null)
{
count = oldCount;
}
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Send count for message " + message.getUid() + " is " + count.get());
localFolder.fetch(new Message[] { message }, fp, null);
try
{
message.setFlag(Flag.X_SEND_IN_PROGRESS, true);
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Sending message with UID " + message.getUid());
transport.sendMessage(message);
message.setFlag(Flag.X_SEND_IN_PROGRESS, false);
message.setFlag(Flag.SEEN, true);
progress++;
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxProgress(account, account.getSentFolderName(), progress, todo);
}
if (K9.FOLDER_NONE.equals(account.getSentFolderName()))
{
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Sent folder set to " + K9.FOLDER_NONE + ", deleting sent message");
message.setFlag(Flag.DELETED, true);
}
else
{
LocalFolder localSentFolder = (LocalFolder) localStore.getFolder( account.getSentFolderName());
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Moving sent message to folder '" + account.getSentFolderName() + "' (" + localSentFolder.getId() + ") ");
localFolder.moveMessages( new Message[] { message }, localSentFolder);
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Moved sent message to folder '" + account.getSentFolderName() + "' (" + localSentFolder.getId() + ") ");
PendingCommand command = new PendingCommand();
command.command = PENDING_COMMAND_APPEND;
command.arguments = new String[] { localSentFolder.getName(), message.getUid() };
queuePendingCommand(account, command);
processPendingCommands(account);
}
}
catch (Exception e)
{
message.setFlag(Flag.X_SEND_FAILED, true);
Log.e(K9.LOG_TAG, "Failed to send message", e);
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxFailed( account, localFolder.getName(), getRootCauseMessage(e));
}
lastFailure = e;
}
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Failed to fetch message for sending", e);
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxFailed( account, localFolder.getName(), getRootCauseMessage(e));
}
lastFailure = e;
}
}
if (localFolder.getMessageCount() == 0)
{
// No longer delete the empty local outbox every time we finish sending mail
// There's no real win to it and it makes the folder selection UI extra stupid
// (We'd need a textentry widget to set the Outbox folder rather than a folder select widget)
// localFolder.delete(false);
}
for (MessagingListener l : getListeners())
{
l.sendPendingMessagesCompleted(account);
}
if (lastFailure != null)
{
NotificationManager notifMgr = (NotificationManager)mApplication.getSystemService(Context.NOTIFICATION_SERVICE);
Notification notif = new Notification(R.drawable.stat_notify_email_generic, mApplication.getString(R.string.send_failure_subject), System.currentTimeMillis());
Intent i = FolderList.actionHandleNotification(mApplication, account, account.getOutboxFolderName());
PendingIntent pi = PendingIntent.getActivity(mApplication, 0, i, 0);
notif.setLatestEventInfo(mApplication, mApplication.getString(R.string.send_failure_subject), lastFailure.getMessage(), pi);
notif.flags |= Notification.FLAG_SHOW_LIGHTS;
notif.flags |= Notification.FLAG_AUTO_CANCEL;
notif.ledARGB = K9.NOTIFICATION_LED_SENDING_FAILURE_COLOR;
notif.ledOnMS = K9.NOTIFICATION_LED_FAST_ON_TIME;
notif.ledOffMS = K9.NOTIFICATION_LED_FAST_OFF_TIME;
notifMgr.notify(-1500 - account.getAccountNumber(), notif);
}
}
catch (UnavailableStorageException e)
{
Log.i(K9.LOG_TAG, "Failed to send pending messages because storage is not available - trying again later.");
throw new UnavailableAccountException(e);
}
catch (Exception e)
{
for (MessagingListener l : getListeners())
{
l.sendPendingMessagesFailed(account);
}
addErrorMessage(account, null, e);
}
finally
{
if (localFolder != null)
{
try
{
localFolder.close();
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Exception while closing folder", e);
}
}
}
}
public void getAccountStats(final Context context, final Account account,
final MessagingListener l)
{
Runnable unreadRunnable = new Runnable()
{
public void run()
{
try
{
AccountStats stats = account.getStats(context);
l.accountStatusChanged(account, stats);
}
catch (MessagingException me)
{
Log.e(K9.LOG_TAG, "Count not get unread count for account " + account.getDescription(),
me);
}
}
};
put("getAccountStats:" + account.getDescription(), l, unreadRunnable);
}
public void getFolderUnreadMessageCount(final Account account, final String folderName,
final MessagingListener l)
{
Runnable unreadRunnable = new Runnable()
{
public void run()
{
int unreadMessageCount = 0;
try
{
Folder localFolder = account.getLocalStore().getFolder(folderName);
unreadMessageCount = localFolder.getUnreadMessageCount();
}
catch (MessagingException me)
{
Log.e(K9.LOG_TAG, "Count not get unread count for account " + account.getDescription(), me);
}
l.folderStatusChanged(account, folderName, unreadMessageCount);
}
};
put("getFolderUnread:" + account.getDescription() + ":" + folderName, l, unreadRunnable);
}
public boolean isMoveCapable(Message message)
{
return !message.getUid().startsWith(K9.LOCAL_UID_PREFIX);
}
public boolean isCopyCapable(Message message)
{
return isMoveCapable(message);
}
public boolean isMoveCapable(final Account account)
{
try
{
Store localStore = account.getLocalStore();
Store remoteStore = account.getRemoteStore();
return localStore.isMoveCapable() && remoteStore.isMoveCapable();
}
catch (MessagingException me)
{
Log.e(K9.LOG_TAG, "Exception while ascertaining move capability", me);
return false;
}
}
public boolean isCopyCapable(final Account account)
{
try
{
Store localStore = account.getLocalStore();
Store remoteStore = account.getRemoteStore();
return localStore.isCopyCapable() && remoteStore.isCopyCapable();
}
catch (MessagingException me)
{
Log.e(K9.LOG_TAG, "Exception while ascertaining copy capability", me);
return false;
}
}
public void moveMessages(final Account account, final String srcFolder, final Message[] messages, final String destFolder,
final MessagingListener listener)
{
for (Message message : messages)
{
suppressMessage(account, srcFolder, message);
}
putBackground("moveMessages", null, new Runnable()
{
public void run()
{
moveOrCopyMessageSynchronous(account, srcFolder, messages, destFolder, false, listener);
}
});
}
public void moveMessage(final Account account, final String srcFolder, final Message message, final String destFolder,
final MessagingListener listener)
{
moveMessages(account, srcFolder, new Message[] { message }, destFolder, listener);
}
public void copyMessages(final Account account, final String srcFolder, final Message[] messages, final String destFolder,
final MessagingListener listener)
{
putBackground("copyMessages", null, new Runnable()
{
public void run()
{
moveOrCopyMessageSynchronous(account, srcFolder, messages, destFolder, true, listener);
}
});
}
public void copyMessage(final Account account, final String srcFolder, final Message message, final String destFolder,
final MessagingListener listener)
{
copyMessages(account, srcFolder, new Message[] { message }, destFolder, listener);
}
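/**
 * Moves or copies the given messages between local folders and queues the matching remote
 * move/copy command. Messages that only exist locally (local UID prefix) are skipped; a
 * copy also fetches the full message bodies so the destination folder gets complete copies.
 */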
private void moveOrCopyMessageSynchronous(final Account account, final String srcFolder, final Message[] inMessages,
final String destFolder, final boolean isCopy, MessagingListener listener)
{
try
{
Store localStore = account.getLocalStore();
Store remoteStore = account.getRemoteStore();
if (!isCopy && (!remoteStore.isMoveCapable() || !localStore.isMoveCapable()))
{
return;
}
if (isCopy && (!remoteStore.isCopyCapable() || !localStore.isCopyCapable()))
{
return;
}
Folder localSrcFolder = localStore.getFolder(srcFolder);
Folder localDestFolder = localStore.getFolder(destFolder);
List<String> uids = new LinkedList<String>();
for (Message message : inMessages)
{
String uid = message.getUid();
if (!uid.startsWith(K9.LOCAL_UID_PREFIX))
{
uids.add(uid);
}
}
Message[] messages = localSrcFolder.getMessages(uids.toArray(EMPTY_STRING_ARRAY), null);
if (messages.length > 0)
{
Map<String, Message> origUidMap = new HashMap<String, Message>();
for (Message message : messages)
{
origUidMap.put(message.getUid(), message);
}
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "moveOrCopyMessageSynchronous: source folder = " + srcFolder
+ ", " + messages.length + " messages, " + ", destination folder = " + destFolder + ", isCopy = " + isCopy);
if (isCopy)
{
FetchProfile fp = new FetchProfile();
fp.add(FetchProfile.Item.ENVELOPE);
fp.add(FetchProfile.Item.BODY);
localSrcFolder.fetch(messages, fp, null);
localSrcFolder.copyMessages(messages, localDestFolder);
}
else
{
localSrcFolder.moveMessages(messages, localDestFolder);
for (String origUid : origUidMap.keySet())
{
for (MessagingListener l : getListeners())
{
l.messageUidChanged(account, srcFolder, origUid, origUidMap.get(origUid).getUid());
}
unsuppressMessage(account, srcFolder, origUid);
}
}
queueMoveOrCopy(account, srcFolder, destFolder, isCopy, origUidMap.keySet().toArray(EMPTY_STRING_ARRAY));
}
processPendingCommands(account);
}
catch (UnavailableStorageException e)
{
Log.i(K9.LOG_TAG, "Failed to move/copy message because storage is not available - trying again later.");
throw new UnavailableAccountException(e);
}
catch (MessagingException me)
{
addErrorMessage(account, null, me);
throw new RuntimeException("Error moving message", me);
}
}
public void expunge(final Account account, final String folder, final MessagingListener listener)
{
putBackground("expunge", null, new Runnable()
{
public void run()
{
queueExpunge(account, folder);
}
});
}
public void deleteDraft(final Account account, String uid)
{
LocalFolder localFolder = null;
try
{
LocalStore localStore = account.getLocalStore();
localFolder = localStore.getFolder(account.getDraftsFolderName());
localFolder.open(OpenMode.READ_WRITE);
Message message = localFolder.getMessage(uid);
if (message != null)
{
deleteMessages(new Message[] { message }, null);
}
}
catch (MessagingException me)
{
addErrorMessage(account, null, me);
}
finally
{
if (localFolder != null)
{
localFolder.close();
}
}
}
public void deleteMessages(final Message[] messages, final MessagingListener listener)
{
actOnMessages(messages, new MessageActor()
{
@Override
public void act(final Account account, final Folder folder,
final List<Message> messages)
{
for (Message message : messages)
{
suppressMessage(account, folder.getName(), message);
}
putBackground("deleteMessages", null, new Runnable()
{
public void run()
{
deleteMessagesSynchronous(account, folder.getName(), messages.toArray(EMPTY_MESSAGE_ARRAY), listener);
}
});
}
});
}
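/**
 * Deletes the given messages from the local store, moving them to the local Trash folder
 * unless they are already in Trash (or Trash is set to -None-), and then queues whatever
 * remote commands the account's delete policy requires.
 */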
private void deleteMessagesSynchronous(final Account account, final String folder, final Message[] messages,
MessagingListener listener)
{
Folder localFolder = null;
Folder localTrashFolder = null;
String[] uids = getUidsFromMessages(messages);
try
{
//We need to make these callbacks before moving the messages to the trash
//as messages get a new UID after being moved
for (Message message : messages)
{
if (listener != null)
{
listener.messageDeleted(account, folder, message);
}
for (MessagingListener l : getListeners())
{
l.messageDeleted(account, folder, message);
}
}
Store localStore = account.getLocalStore();
localFolder = localStore.getFolder(folder);
if (folder.equals(account.getTrashFolderName()) || K9.FOLDER_NONE.equals(account.getTrashFolderName()))
{
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "Deleting messages in trash folder or trash set to -None-, not copying");
localFolder.setFlags(messages, new Flag[] { Flag.DELETED }, true);
}
else
{
localTrashFolder = localStore.getFolder(account.getTrashFolderName());
if (!localTrashFolder.exists())
{
localTrashFolder.create(Folder.FolderType.HOLDS_MESSAGES);
}
if (localTrashFolder.exists())
{
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "Deleting messages in normal folder, moving");
localFolder.moveMessages(messages, localTrashFolder);
}
}
for (MessagingListener l : getListeners())
{
l.folderStatusChanged(account, folder, localFolder.getUnreadMessageCount());
if (localTrashFolder != null)
{
l.folderStatusChanged(account, account.getTrashFolderName(), localTrashFolder.getUnreadMessageCount());
}
}
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "Delete policy for account " + account.getDescription() + " is " + account.getDeletePolicy());
if (folder.equals(account.getOutboxFolderName()))
{
for (Message message : messages)
{
// If the message was in the Outbox, then it has been copied to local Trash, and has
// to be copied to remote trash
PendingCommand command = new PendingCommand();
command.command = PENDING_COMMAND_APPEND;
command.arguments =
new String[]
{
account.getTrashFolderName(),
message.getUid()
};
queuePendingCommand(account, command);
}
processPendingCommands(account);
}
else if ( account.getDeletePolicy() == Account.DELETE_POLICY_ON_DELETE)
{
if (folder.equals(account.getTrashFolderName()))
{
queueSetFlag(account, folder, Boolean.toString(true), Flag.DELETED.toString(), uids);
}
else
{
queueMoveOrCopy(account, folder, account.getTrashFolderName(), false, uids);
}
processPendingCommands(account);
}
else if (account.getDeletePolicy() == Account.DELETE_POLICY_MARK_AS_READ)
{
queueSetFlag(account, folder, Boolean.toString(true), Flag.SEEN.toString(), uids);
processPendingCommands(account);
}
else
{
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "Delete policy " + account.getDeletePolicy() + " prevents delete from server");
}
for (String uid : uids)
{
unsuppressMessage(account, folder, uid);
}
}
catch (UnavailableStorageException e)
{
Log.i(K9.LOG_TAG, "Failed to delete message because storage is not available - trying again later.");
throw new UnavailableAccountException(e);
}
catch (MessagingException me)
{
addErrorMessage(account, null, me);
throw new RuntimeException("Error deleting message from local store.", me);
}
finally
{
if (localFolder != null)
{
localFolder.close();
}
if (localTrashFolder != null)
{
localTrashFolder.close();
}
}
}
private String[] getUidsFromMessages(Message[] messages)
{
String[] uids = new String[messages.length];
for (int i = 0; i < messages.length; i++)
{
uids[i] = messages[i].getUid();
}
return uids;
}
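/**
 * Processes a pending empty-trash command by flagging all messages in the remote Trash
 * folder as deleted and, if the account's expunge policy is EXPUNGE_IMMEDIATELY, expunging
 * the folder right away.
 */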
private void processPendingEmptyTrash(PendingCommand command, Account account) throws MessagingException
{
Store remoteStore = account.getRemoteStore();
Folder remoteFolder = remoteStore.getFolder(account.getTrashFolderName());
try
{
if (remoteFolder.exists())
{
remoteFolder.open(OpenMode.READ_WRITE);
remoteFolder.setFlags(new Flag [] { Flag.DELETED }, true);
if (Account.EXPUNGE_IMMEDIATELY.equals(account.getExpungePolicy()))
{
remoteFolder.expunge();
}
}
}
finally
{
if (remoteFolder != null)
{
remoteFolder.close();
}
}
}
public void emptyTrash(final Account account, MessagingListener listener)
{
putBackground("emptyTrash", listener, new Runnable()
{
public void run()
{
Folder localFolder = null;
try
{
Store localStore = account.getLocalStore();
localFolder = localStore.getFolder(account.getTrashFolderName());
localFolder.open(OpenMode.READ_WRITE);
localFolder.setFlags(new Flag[] { Flag.DELETED }, true);
for (MessagingListener l : getListeners())
{
l.emptyTrashCompleted(account);
}
List<String> args = new ArrayList<String>();
PendingCommand command = new PendingCommand();
command.command = PENDING_COMMAND_EMPTY_TRASH;
command.arguments = args.toArray(EMPTY_STRING_ARRAY);
queuePendingCommand(account, command);
processPendingCommands(account);
}
catch (UnavailableStorageException e)
{
Log.i(K9.LOG_TAG, "Failed to empty trash because storage is not available - trying again later.");
throw new UnavailableAccountException(e);
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "emptyTrash failed", e);
addErrorMessage(account, null, e);
}
finally
{
if (localFolder != null)
{
localFolder.close();
}
}
}
});
}
public void sendAlternate(final Context context, Account account, Message message)
{
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "About to load message " + account.getDescription() + ":" + message.getFolder().getName()
+ ":" + message.getUid() + " for sendAlternate");
loadMessageForView(account, message.getFolder().getName(),
message.getUid(), new MessagingListener()
{
@Override
public void loadMessageForViewBodyAvailable(Account account, String folder, String uid,
Message message)
{
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "Got message " + account.getDescription() + ":" + folder
+ ":" + message.getUid() + " for sendAlternate");
try
{
Intent msg=new Intent(Intent.ACTION_SEND);
String quotedText = null;
Part part = MimeUtility.findFirstPartByMimeType(message,
"text/plain");
if (part == null)
{
part = MimeUtility.findFirstPartByMimeType(message, "text/html");
}
if (part != null)
{
quotedText = MimeUtility.getTextFromPart(part);
}
if (quotedText != null)
{
msg.putExtra(Intent.EXTRA_TEXT, quotedText);
}
msg.putExtra(Intent.EXTRA_SUBJECT, "Fwd: " + message.getSubject());
msg.setType("text/plain");
context.startActivity(Intent.createChooser(msg, context.getString(R.string.send_alternate_chooser_title)));
}
catch (MessagingException me)
{
Log.e(K9.LOG_TAG, "Unable to send email through alternate program", me);
}
}
});
}
/**
* Checks mail for one or more accounts. If account is null, all accounts
* are checked.
*
* @param context
* @param account
* @param listener
*/
public void checkMail(final Context context, final Account account,
final boolean ignoreLastCheckedTime,
final boolean useManualWakeLock,
final MessagingListener listener)
{
TracingWakeLock twakeLock = null;
if (useManualWakeLock)
{
TracingPowerManager pm = TracingPowerManager.getPowerManager(context);
twakeLock = pm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "K9 MessagingController.checkMail");
twakeLock.setReferenceCounted(false);
twakeLock.acquire(K9.MANUAL_WAKE_LOCK_TIMEOUT);
}
final TracingWakeLock wakeLock = twakeLock;
for (MessagingListener l : getListeners())
{
l.checkMailStarted(context, account);
}
putBackground("checkMail", listener, new Runnable()
{
public void run()
{
final NotificationManager notifMgr = (NotificationManager)context
.getSystemService(Context.NOTIFICATION_SERVICE);
try
{
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Starting mail check");
Preferences prefs = Preferences.getPreferences(context);
Account[] accounts;
if (account != null)
{
accounts = new Account[]
{
account
};
}
else
{
accounts = prefs.getAccounts();
}
for (final Account account : accounts)
{
if (!account.isAvailable(context))
{
if (K9.DEBUG)
{
Log.i(K9.LOG_TAG, "Skipping synchronizing unavailable account " + account.getDescription());
}
continue;
}
final long accountInterval = account.getAutomaticCheckIntervalMinutes() * 60 * 1000;
if (!ignoreLastCheckedTime && accountInterval <= 0)
{
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Skipping synchronizing account " + account.getDescription());
continue;
}
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Synchronizing account " + account.getDescription());
account.setRingNotified(false);
sendPendingMessages(account, listener);
try
{
Account.FolderMode aDisplayMode = account.getFolderDisplayMode();
Account.FolderMode aSyncMode = account.getFolderSyncMode();
Store localStore = account.getLocalStore();
for (final Folder folder : localStore.getPersonalNamespaces(false))
{
folder.open(Folder.OpenMode.READ_WRITE);
folder.refresh(prefs);
Folder.FolderClass fDisplayClass = folder.getDisplayClass();
Folder.FolderClass fSyncClass = folder.getSyncClass();
if (modeMismatch(aDisplayMode, fDisplayClass))
{
// Never sync a folder that isn't displayed
/*
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Not syncing folder " + folder.getName() +
" which is in display mode " + fDisplayClass + " while account is in display mode " + aDisplayMode);
*/
continue;
}
if (modeMismatch(aSyncMode, fSyncClass))
{
// Do not sync folders in the wrong class
/*
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Not syncing folder " + folder.getName() +
" which is in sync mode " + fSyncClass + " while account is in sync mode " + aSyncMode);
*/
continue;
}
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Folder " + folder.getName() + " was last synced @ " +
new Date(folder.getLastChecked()));
if (!ignoreLastCheckedTime && folder.getLastChecked() >
(System.currentTimeMillis() - accountInterval))
{
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Not syncing folder " + folder.getName()
+ ", previously synced @ " + new Date(folder.getLastChecked())
+ " which would be too recent for the account period");
continue;
}
putBackground("sync" + folder.getName(), null, new Runnable()
{
public void run()
{
LocalFolder tLocalFolder = null;
try
{
// In case multiple Commands get enqueued, don't run more than
// once
final LocalStore localStore = account.getLocalStore();
tLocalFolder = localStore.getFolder(folder.getName());
tLocalFolder.open(Folder.OpenMode.READ_WRITE);
if (!ignoreLastCheckedTime && tLocalFolder.getLastChecked() >
(System.currentTimeMillis() - accountInterval))
{
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Not running Command for folder " + folder.getName()
+ ", previously synced @ " + new Date(folder.getLastChecked())
+ " which would be too recent for the account period");
return;
}
if (account.isShowOngoing())
{
Notification notif = new Notification(R.drawable.ic_menu_refresh,
context.getString(R.string.notification_bg_sync_ticker, account.getDescription(), folder.getName()),
System.currentTimeMillis());
Intent intent = MessageList.actionHandleFolderIntent(context, account, K9.INBOX);
PendingIntent pi = PendingIntent.getActivity(context, 0, intent, 0);
notif.setLatestEventInfo(context, context.getString(R.string.notification_bg_sync_title), account.getDescription()
+ context.getString(R.string.notification_bg_title_separator) + folder.getName(), pi);
notif.flags = Notification.FLAG_ONGOING_EVENT;
if (K9.NOTIFICATION_LED_WHILE_SYNCING)
{
notif.flags |= Notification.FLAG_SHOW_LIGHTS;
notif.ledARGB = account.getNotificationSetting().getLedColor();
notif.ledOnMS = K9.NOTIFICATION_LED_FAST_ON_TIME;
notif.ledOffMS = K9.NOTIFICATION_LED_FAST_OFF_TIME;
}
notifMgr.notify(K9.FETCHING_EMAIL_NOTIFICATION - account.getAccountNumber(), notif);
}
try
{
synchronizeMailboxSynchronous(account, folder.getName(), listener, null);
}
finally
{
if (account.isShowOngoing())
{
notifMgr.cancel(K9.FETCHING_EMAIL_NOTIFICATION - account.getAccountNumber());
}
}
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Exception while processing folder " +
account.getDescription() + ":" + folder.getName(), e);
addErrorMessage(account, null, e);
}
finally
{
if (tLocalFolder != null)
{
tLocalFolder.close();
}
}
}
}
);
}
}
catch (MessagingException e)
{
Log.e(K9.LOG_TAG, "Unable to synchronize account " + account.getName(), e);
addErrorMessage(account, null, e);
}
finally
{
putBackground("clear notification flag for " + account.getDescription(), null, new Runnable()
{
public void run()
{
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Clearing notification flag for " + account.getDescription());
account.setRingNotified(false);
try
{
AccountStats stats = account.getStats(context);
if (stats == null || stats.unreadMessageCount == 0)
{
notifyAccountCancel(context, account);
}
}
catch (MessagingException e)
{
Log.e(K9.LOG_TAG, "Unable to getUnreadMessageCount for account: " + account, e);
}
}
}
);
}
}
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Unable to synchronize mail", e);
addErrorMessage(account, null, e);
}
putBackground("finalize sync", null, new Runnable()
{
public void run()
{
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Finished mail sync");
if (wakeLock != null)
{
wakeLock.release();
}
for (MessagingListener l : getListeners())
{
l.checkMailFinished(context, account);
}
}
}
);
}
});
}
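/**
 * Compacts the account's local store in the background and reports the resulting size
 * change to the listeners.
 */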
public void compact(final Account account, final MessagingListener ml)
{
putBackground("compact:" + account.getDescription(), ml, new Runnable()
{
public void run()
{
try
{
LocalStore localStore = account.getLocalStore();
long oldSize = localStore.getSize();
localStore.compact();
long newSize = localStore.getSize();
if (ml != null)
{
ml.accountSizeChanged(account, oldSize, newSize);
}
for (MessagingListener l : getListeners())
{
l.accountSizeChanged(account, oldSize, newSize);
}
}
catch (UnavailableStorageException e)
{
Log.i(K9.LOG_TAG, "Failed to compact account because storage is not available - trying again later.");
throw new UnavailableAccountException(e);
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Failed to compact account " + account.getDescription(), e);
}
}
});
}
public void clear(final Account account, final MessagingListener ml)
{
putBackground("clear:" + account.getDescription(), ml, new Runnable()
{
public void run()
{
try
{
LocalStore localStore = account.getLocalStore();
long oldSize = localStore.getSize();
localStore.clear();
localStore.resetVisibleLimits(account.getDisplayCount());
long newSize = localStore.getSize();
AccountStats stats = new AccountStats();
stats.size = newSize;
stats.unreadMessageCount = 0;
stats.flaggedMessageCount = 0;
if (ml != null)
{
ml.accountSizeChanged(account, oldSize, newSize);
ml.accountStatusChanged(account, stats);
}
for (MessagingListener l : getListeners())
{
l.accountSizeChanged(account, oldSize, newSize);
l.accountStatusChanged(account, stats);
}
}
catch (UnavailableStorageException e)
{
Log.i(K9.LOG_TAG, "Failed to clear account because storage is not available - trying again later.");
throw new UnavailableAccountException(e);
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Failed to clear account " + account.getDescription(), e);
}
}
});
}
public void recreate(final Account account, final MessagingListener ml)
{
putBackground("recreate:" + account.getDescription(), ml, new Runnable()
{
public void run()
{
try
{
LocalStore localStore = account.getLocalStore();
long oldSize = localStore.getSize();
localStore.recreate();
localStore.resetVisibleLimits(account.getDisplayCount());
long newSize = localStore.getSize();
AccountStats stats = new AccountStats();
stats.size = newSize;
stats.unreadMessageCount = 0;
stats.flaggedMessageCount = 0;
if (ml != null)
{
ml.accountSizeChanged(account, oldSize, newSize);
ml.accountStatusChanged(account, stats);
}
for (MessagingListener l : getListeners())
{
l.accountSizeChanged(account, oldSize, newSize);
l.accountStatusChanged(account, stats);
}
}
catch (UnavailableStorageException e)
{
Log.i(K9.LOG_TAG, "Failed to recreate an account because storage is not available - trying again later.");
throw new UnavailableAccountException(e);
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Failed to recreate account " + account.getDescription(), e);
}
}
});
}
private boolean shouldNotifyForMessage(Account account, Message message)
{
// Do not notify if the user does not have notifications
// enabled or if the message has been read
if (!account.isNotifyNewMail() || message.isSet(Flag.SEEN) || (account.getName() == null))
{
return false;
}
Folder folder = message.getFolder();
if (folder != null)
{
// No notification for new messages in Trash, Drafts, or Sent folder.
// But do notify if it's the INBOX (see issue 1817).
String folderName = folder.getName();
if (!K9.INBOX.equals(folderName) &&
(account.getTrashFolderName().equals(folderName)
|| account.getDraftsFolderName().equals(folderName)
|| account.getSentFolderName().equals(folderName)))
{
return false;
}
}
return true;
}
/**
* Creates a notification for new email messages and causes the configured
* ringtone, lights, and vibration to be played.
*/
private boolean notifyAccount(Context context, Account account, Message message, int previousUnreadMessageCount, AtomicInteger newMessageCount)
{
// If we don't even have an account name, don't show the notification
// (This happens during initial account setup)
//
if (account.getName() == null)
{
return false;
}
// If we have a message, set the notification to "<From>: <Subject>"
StringBuilder messageNotice = new StringBuilder();
final KeyguardManager keyguardService = (KeyguardManager) context.getSystemService(Context.KEYGUARD_SERVICE);
try
{
if (message != null && message.getFrom() != null)
{
Address[] fromAddrs = message.getFrom();
String from = fromAddrs.length > 0 ? fromAddrs[0].toFriendly().toString() : null;
String subject = message.getSubject();
if (subject == null)
{
subject = context.getString(R.string.general_no_subject);
}
if (from != null)
{
// Show From: address by default
if (!account.isAnIdentity(fromAddrs))
{
messageNotice.append(from + ": " + subject);
}
// show To: if the message was sent from me
else
{
if (!account.isNotifySelfNewMail())
{
return false;
}
Address[] rcpts = message.getRecipients(Message.RecipientType.TO);
String to = rcpts.length > 0 ? rcpts[0].toFriendly().toString() : null;
if (to != null)
{
messageNotice.append(String.format(context.getString(R.string.message_to_fmt), to) + ": " + subject);
}
else
{
messageNotice.append(context.getString(R.string.general_no_sender) + ": " + subject);
}
}
}
}
}
catch (MessagingException e)
{
Log.e(K9.LOG_TAG, "Unable to get message information for notification.", e);
}
// If privacy mode active and keyguard active
// OR
// If we could not set a per-message notification, revert to a default message
if ((K9.keyguardPrivacy() && keyguardService.inKeyguardRestrictedInputMode()) || messageNotice.length() == 0)
{
messageNotice = new StringBuilder(context.getString(R.string.notification_new_title));
}
NotificationManager notifMgr =
(NotificationManager)context.getSystemService(Context.NOTIFICATION_SERVICE);
Notification notif = new Notification(R.drawable.stat_notify_email_generic, messageNotice, System.currentTimeMillis());
notif.number = previousUnreadMessageCount + newMessageCount.get();
Intent i = FolderList.actionHandleNotification(context, account, message.getFolder().getName());
PendingIntent pi = PendingIntent.getActivity(context, 0, i, 0);
String accountNotice = context.getString(R.string.notification_new_one_account_fmt, notif.number, account.getDescription());
notif.setLatestEventInfo(context, accountNotice, messageNotice, pi);
// Only ring or vibrate if we have not done so already on this
// account and fetch
boolean ringAndVibrate = false;
if (!account.isRingNotified())
{
account.setRingNotified(true);
ringAndVibrate = true;
}
configureNotification(account.getNotificationSetting(), notif, ringAndVibrate);
notifMgr.notify(account.getAccountNumber(), notif);
return true;
}
/**
* @param setting
* Configuration template. Never <code>null</code>.
* @param notification
* Object to configure. Never <code>null</code>.
* @param ringAndVibrate
* <code>true</code> if ringtone/vibration are allowed,
* <code>false</code> otherwise.
*/
private void configureNotification(final NotificationSetting setting, final Notification notification, final boolean ringAndVibrate)
{
if (ringAndVibrate)
{
if (setting.shouldRing())
{
String ringtone = setting.getRingtone();
notification.sound = TextUtils.isEmpty(ringtone) ? null : Uri.parse(ringtone);
notification.audioStreamType = AudioManager.STREAM_NOTIFICATION;
}
if (setting.isVibrate())
{
long[] pattern = getVibratePattern(setting.getVibratePattern(), setting.getVibrateTimes());
notification.vibrate = pattern;
}
}
if (setting.isLed())
{
notification.flags |= Notification.FLAG_SHOW_LIGHTS;
notification.ledARGB = setting.getLedColor();
notification.ledOnMS = K9.NOTIFICATION_LED_ON_TIME;
notification.ledOffMS = K9.NOTIFICATION_LED_OFF_TIME;
}
}
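// Illustrative usage, not part of the original code: notifyAccount() above invokes this as
// configureNotification(account.getNotificationSetting(), notif, ringAndVibrate), passing
// ringAndVibrate = true only when the account has not been ring-notified since the flag
// was last cleared.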
/**
* Fetch a vibration pattern.
*
* @param vibratePattern Vibration pattern index to use.
* @param vibrateTimes Number of times to repeat the vibration pattern.
* @return Pattern repeated the number of times requested.
*/
public static long[] getVibratePattern(int vibratePattern, int vibrateTimes)
{
// These are "off, on" patterns, specified in milliseconds
long[] pattern0 = new long[] {300,200}; // like the default pattern
long[] pattern1 = new long[] {100,200};
long[] pattern2 = new long[] {100,500};
long[] pattern3 = new long[] {200,200};
long[] pattern4 = new long[] {200,500};
long[] pattern5 = new long[] {500,500};
long[] selectedPattern = pattern0; //default pattern
switch (vibratePattern)
{
case 1:
selectedPattern = pattern1;
break;
case 2:
selectedPattern = pattern2;
break;
case 3:
selectedPattern = pattern3;
break;
case 4:
selectedPattern = pattern4;
break;
case 5:
selectedPattern = pattern5;
break;
}
long[] repeatedPattern = new long[selectedPattern.length * vibrateTimes];
for (int n = 0; n < vibrateTimes; n++)
{
System.arraycopy(selectedPattern, 0, repeatedPattern, n * selectedPattern.length, selectedPattern.length);
}
// Do not wait before starting the vibration pattern.
repeatedPattern[0] = 0;
return repeatedPattern;
}
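// Worked example, for illustration only: getVibratePattern(1, 2) selects pattern1 = {100, 200},
// repeats it twice to {100, 200, 100, 200}, and then zeroes the leading delay,
// returning {0, 200, 100, 200}.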
/** Cancel a notification of new email messages */
public void notifyAccountCancel(Context context, Account account)
{
NotificationManager notifMgr =
(NotificationManager)context.getSystemService(Context.NOTIFICATION_SERVICE);
notifMgr.cancel(account.getAccountNumber());
notifMgr.cancel(-1000 - account.getAccountNumber());
}
public Message saveDraft(final Account account, final Message message)
{
Message localMessage = null;
try
{
LocalStore localStore = account.getLocalStore();
LocalFolder localFolder = localStore.getFolder(account.getDraftsFolderName());
localFolder.open(OpenMode.READ_WRITE);
localFolder.appendMessages(new Message[]
{
message
});
localMessage = localFolder.getMessage(message.getUid());
localMessage.setFlag(Flag.X_DOWNLOADED_FULL, true);
PendingCommand command = new PendingCommand();
command.command = PENDING_COMMAND_APPEND;
command.arguments = new String[]
{
localFolder.getName(),
localMessage.getUid()
};
queuePendingCommand(account, command);
processPendingCommands(account);
}
catch (MessagingException e)
{
Log.e(K9.LOG_TAG, "Unable to save message as draft.", e);
addErrorMessage(account, null, e);
}
return localMessage;
}
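// Returns true when a folder of class fMode should be excluded under the account-level
// folder mode aMode, e.g. a second-class folder while the account only includes
// first-class folders.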
public boolean modeMismatch(Account.FolderMode aMode, Folder.FolderClass fMode)
{
if (aMode == Account.FolderMode.NONE
|| (aMode == Account.FolderMode.FIRST_CLASS &&
fMode != Folder.FolderClass.FIRST_CLASS)
|| (aMode == Account.FolderMode.FIRST_AND_SECOND_CLASS &&
fMode != Folder.FolderClass.FIRST_CLASS &&
fMode != Folder.FolderClass.SECOND_CLASS)
|| (aMode == Account.FolderMode.NOT_SECOND_CLASS &&
fMode == Folder.FolderClass.SECOND_CLASS))
{
return true;
}
else
{
return false;
}
}
static AtomicInteger sequencing = new AtomicInteger(0);
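// Commands are ordered so that foreground commands run before background ones; commands
// of equal priority run in FIFO order using the sequence counter above.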
class Command implements Comparable<Command>
{
public Runnable runnable;
public MessagingListener listener;
public String description;
boolean isForeground;
int sequence = sequencing.getAndIncrement();
@Override
public int compareTo(Command other)
{
if (other.isForeground && !isForeground)
{
return 1;
}
else if (!other.isForeground && isForeground)
{
return -1;
}
else
{
return (sequence - other.sequence);
}
}
}
public MessagingListener getCheckMailListener()
{
return checkMailListener;
}
public void setCheckMailListener(MessagingListener checkMailListener)
{
if (this.checkMailListener != null)
{
removeListener(this.checkMailListener);
}
this.checkMailListener = checkMailListener;
if (this.checkMailListener != null)
{
addListener(this.checkMailListener);
}
}
public SORT_TYPE getSortType()
{
return sortType;
}
public void setSortType(SORT_TYPE sortType)
{
this.sortType = sortType;
}
public boolean isSortAscending(SORT_TYPE sortType)
{
Boolean sortAsc = sortAscending.get(sortType);
if (sortAsc == null)
{
return sortType.isDefaultAscending();
}
else return sortAsc;
}
public void setSortAscending(SORT_TYPE sortType, boolean nsortAscending)
{
sortAscending.put(sortType, nsortAscending);
}
public Collection<Pusher> getPushers()
{
return pushers.values();
}
public boolean setupPushing(final Account account)
{
try
{
Pusher previousPusher = pushers.remove(account);
if (previousPusher != null)
{
previousPusher.stop();
}
Preferences prefs = Preferences.getPreferences(mApplication);
Account.FolderMode aDisplayMode = account.getFolderDisplayMode();
Account.FolderMode aPushMode = account.getFolderPushMode();
List<String> names = new ArrayList<String>();
Store localStore = account.getLocalStore();
for (final Folder folder : localStore.getPersonalNamespaces(false))
{
if (folder.getName().equals(account.getErrorFolderName())
|| folder.getName().equals(account.getOutboxFolderName()))
{
/*
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Not pushing folder " + folder.getName() +
" which should never be pushed");
*/
continue;
}
folder.open(Folder.OpenMode.READ_WRITE);
folder.refresh(prefs);
Folder.FolderClass fDisplayClass = folder.getDisplayClass();
Folder.FolderClass fPushClass = folder.getPushClass();
if (modeMismatch(aDisplayMode, fDisplayClass))
{
// Never push a folder that isn't displayed
/*
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Not pushing folder " + folder.getName() +
" which is in display class " + fDisplayClass + " while account is in display mode " + aDisplayMode);
*/
continue;
}
if (modeMismatch(aPushMode, fPushClass))
{
// Do not push folders in the wrong class
/*
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Not pushing folder " + folder.getName() +
" which is in push mode " + fPushClass + " while account is in push mode " + aPushMode);
*/
continue;
}
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Starting pusher for " + account.getDescription() + ":" + folder.getName());
names.add(folder.getName());
}
if (names.size() > 0)
{
PushReceiver receiver = new MessagingControllerPushReceiver(mApplication, account, this);
int maxPushFolders = account.getMaxPushFolders();
if (names.size() > maxPushFolders)
{
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Count of folders to push for account " + account.getDescription() + " is " + names.size()
+ ", greater than limit of " + maxPushFolders + ", truncating");
names = names.subList(0, maxPushFolders);
}
try
{
Store store = account.getRemoteStore();
if (!store.isPushCapable())
{
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Account " + account.getDescription() + " is not push capable, skipping");
return false;
}
Pusher pusher = store.getPusher(receiver);
if (pusher != null)
{
Pusher oldPusher = pushers.putIfAbsent(account, pusher);
if (oldPusher == null)
{
pusher.start(names);
}
}
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Could not get remote store", e);
return false;
}
return true;
}
else
{
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "No folders are configured for pushing in account " + account.getDescription());
return false;
}
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Got exception while setting up pushing", e);
}
return false;
}
public void stopAllPushing()
{
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Stopping all pushers");
Iterator<Pusher> iter = pushers.values().iterator();
while (iter.hasNext())
{
Pusher pusher = iter.next();
iter.remove();
pusher.stop();
}
}
public void messagesArrived(final Account account, final Folder remoteFolder, final List<Message> messages, final boolean flagSyncOnly)
{
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Got new pushed email messages for account " + account.getDescription()
+ ", folder " + remoteFolder.getName());
final CountDownLatch latch = new CountDownLatch(1);
putBackground("Push messageArrived of account " + account.getDescription()
+ ", folder " + remoteFolder.getName(), null, new Runnable()
{
public void run()
{
LocalFolder localFolder = null;
try
{
LocalStore localStore = account.getLocalStore();
localFolder= localStore.getFolder(remoteFolder.getName());
localFolder.open(OpenMode.READ_WRITE);
account.setRingNotified(false);
int newCount = downloadMessages(account, remoteFolder, localFolder, messages, flagSyncOnly);
int unreadMessageCount = setLocalUnreadCountToRemote(localFolder, remoteFolder, messages.size());
setLocalFlaggedCountToRemote(localFolder, remoteFolder);
localFolder.setLastPush(System.currentTimeMillis());
localFolder.setStatus(null);
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "messagesArrived newCount = " + newCount + ", unread count = " + unreadMessageCount);
if (unreadMessageCount == 0)
{
notifyAccountCancel(mApplication, account);
}
for (MessagingListener l : getListeners())
{
l.folderStatusChanged(account, remoteFolder.getName(), unreadMessageCount);
}
}
catch (Exception e)
{
String rootMessage = getRootCauseMessage(e);
String errorMessage = "Push failed: " + rootMessage;
try
{
localFolder.setStatus(errorMessage);
}
catch (Exception se)
{
Log.e(K9.LOG_TAG, "Unable to set failed status on localFolder", se);
}
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxFailed(account, remoteFolder.getName(), errorMessage);
}
addErrorMessage(account, null, e);
}
finally
{
if (localFolder != null)
{
try
{
localFolder.close();
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Unable to close localFolder", e);
}
}
latch.countDown();
}
}
});
try
{
latch.await();
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Interrupted while awaiting latch release", e);
}
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "MessagingController.messagesArrivedLatch released");
}
enum MemorizingState { STARTED, FINISHED, FAILED };
class Memory
{
Account account;
String folderName;
MemorizingState syncingState = null;
MemorizingState sendingState = null;
MemorizingState pushingState = null;
MemorizingState processingState = null;
String failureMessage = null;
int syncingTotalMessagesInMailbox;
int syncingNumNewMessages;
int folderCompleted = 0;
int folderTotal = 0;
String processingCommandTitle = null;
Memory(Account nAccount, String nFolderName)
{
account = nAccount;
folderName = nFolderName;
}
String getKey()
{
return getMemoryKey(account, folderName);
}
}
static String getMemoryKey(Account taccount, String tfolderName)
{
return taccount.getDescription() + ":" + tfolderName;
}
class MemorizingListener extends MessagingListener
{
HashMap<String, Memory> memories = new HashMap<String, Memory>(31);
Memory getMemory(Account account, String folderName)
{
Memory memory = memories.get(getMemoryKey(account, folderName));
if (memory == null)
{
memory = new Memory(account, folderName);
memories.put(memory.getKey(), memory);
}
return memory;
}
@Override
public synchronized void synchronizeMailboxStarted(Account account, String folder)
{
Memory memory = getMemory(account, folder);
memory.syncingState = MemorizingState.STARTED;
memory.folderCompleted = 0;
memory.folderTotal = 0;
}
@Override
public synchronized void synchronizeMailboxFinished(Account account, String folder,
int totalMessagesInMailbox, int numNewMessages)
{
Memory memory = getMemory(account, folder);
memory.syncingState = MemorizingState.FINISHED;
memory.syncingTotalMessagesInMailbox = totalMessagesInMailbox;
memory.syncingNumNewMessages = numNewMessages;
}
@Override
public synchronized void synchronizeMailboxFailed(Account account, String folder,
String message)
{
Memory memory = getMemory(account, folder);
memory.syncingState = MemorizingState.FAILED;
memory.failureMessage = message;
}
synchronized void refreshOther(MessagingListener other)
{
if (other != null)
{
Memory syncStarted = null;
Memory sendStarted = null;
Memory processingStarted = null;
for (Memory memory : memories.values())
{
if (memory.syncingState != null)
{
switch (memory.syncingState)
{
case STARTED:
syncStarted = memory;
break;
case FINISHED:
other.synchronizeMailboxFinished(memory.account, memory.folderName,
memory.syncingTotalMessagesInMailbox, memory.syncingNumNewMessages);
break;
case FAILED:
other.synchronizeMailboxFailed(memory.account, memory.folderName,
memory.failureMessage);
break;
}
}
if (memory.sendingState != null)
{
switch (memory.sendingState)
{
case STARTED:
sendStarted = memory;
break;
case FINISHED:
other.sendPendingMessagesCompleted(memory.account);
break;
case FAILED:
other.sendPendingMessagesFailed(memory.account);
break;
}
}
if (memory.pushingState != null)
{
switch (memory.pushingState)
{
case STARTED:
other.setPushActive(memory.account, memory.folderName, true);
break;
case FINISHED:
other.setPushActive(memory.account, memory.folderName, false);
break;
}
}
if (memory.processingState != null)
{
switch (memory.processingState)
{
case STARTED:
processingStarted = memory;
break;
case FINISHED:
case FAILED:
other.pendingCommandsFinished(memory.account);
break;
}
}
}
Memory somethingStarted = null;
if (syncStarted != null)
{
other.synchronizeMailboxStarted(syncStarted.account, syncStarted.folderName);
somethingStarted = syncStarted;
}
if (sendStarted != null)
{
other.sendPendingMessagesStarted(sendStarted.account);
somethingStarted = sendStarted;
}
if (processingStarted != null)
{
other.pendingCommandsProcessing(processingStarted.account);
if (processingStarted.processingCommandTitle != null)
{
other.pendingCommandStarted(processingStarted.account, processingStarted.processingCommandTitle);
}
else
{
other.pendingCommandCompleted(processingStarted.account, processingStarted.processingCommandTitle);
}
somethingStarted = processingStarted;
}
if (somethingStarted != null && somethingStarted.folderTotal > 0)
{
other.synchronizeMailboxProgress(somethingStarted.account, somethingStarted.folderName, somethingStarted.folderCompleted, somethingStarted.folderTotal);
}
}
}
@Override
public synchronized void setPushActive(Account account, String folderName, boolean active)
{
Memory memory = getMemory(account, folderName);
memory.pushingState = (active ? MemorizingState.STARTED : MemorizingState.FINISHED);
}
@Override
public synchronized void sendPendingMessagesStarted(Account account)
{
Memory memory = getMemory(account, null);
memory.sendingState = MemorizingState.STARTED;
memory.folderCompleted = 0;
memory.folderTotal = 0;
}
@Override
public synchronized void sendPendingMessagesCompleted(Account account)
{
Memory memory = getMemory(account, null);
memory.sendingState = MemorizingState.FINISHED;
}
@Override
public synchronized void sendPendingMessagesFailed(Account account)
{
Memory memory = getMemory(account, null);
memory.sendingState = MemorizingState.FAILED;
}
@Override
public synchronized void synchronizeMailboxProgress(Account account, String folderName, int completed, int total)
{
Memory memory = getMemory(account, folderName);
memory.folderCompleted = completed;
memory.folderTotal = total;
}
@Override
public synchronized void pendingCommandsProcessing(Account account)
{
Memory memory = getMemory(account, null);
memory.processingState = MemorizingState.STARTED;
memory.folderCompleted = 0;
memory.folderTotal = 0;
}
@Override
public synchronized void pendingCommandsFinished(Account account)
{
Memory memory = getMemory(account, null);
memory.processingState = MemorizingState.FINISHED;
}
@Override
public synchronized void pendingCommandStarted(Account account, String commandTitle)
{
Memory memory = getMemory(account, null);
memory.processingCommandTitle = commandTitle;
}
@Override
public synchronized void pendingCommandCompleted(Account account, String commandTitle)
{
Memory memory = getMemory(account, null);
memory.processingCommandTitle = null;
}
}
private void actOnMessages(Message[] messages, MessageActor actor)
{
Map<Account, Map<Folder, List<Message>>> accountMap = new HashMap<Account, Map<Folder, List<Message>>>();
for (Message message : messages)
{
Folder folder = message.getFolder();
Account account = folder.getAccount();
Map<Folder, List<Message>> folderMap = accountMap.get(account);
if (folderMap == null)
{
folderMap = new HashMap<Folder, List<Message>>();
accountMap.put(account, folderMap);
}
List<Message> messageList = folderMap.get(folder);
if (messageList == null)
{
messageList = new LinkedList<Message>();
folderMap.put(folder, messageList);
}
messageList.add(message);
}
for (Map.Entry<Account, Map<Folder, List<Message>>> entry : accountMap.entrySet())
{
Account account = entry.getKey();
//account.refresh(Preferences.getPreferences(K9.app));
Map<Folder, List<Message>> folderMap = entry.getValue();
for (Map.Entry<Folder, List<Message>> folderEntry : folderMap.entrySet())
{
Folder folder = folderEntry.getKey();
List<Message> messageList = folderEntry.getValue();
actor.act(account, folder, messageList);
}
}
}
interface MessageActor
{
public void act(final Account account, final Folder folder, final List<Message> messages);
}
}
| src/com/fsck/k9/controller/MessagingController.java |
package com.fsck.k9.controller;
import java.io.CharArrayWriter;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.PriorityBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import android.app.Application;
import android.app.KeyguardManager;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.media.AudioManager;
import android.net.Uri;
import android.os.PowerManager;
import android.os.Process;
import android.text.TextUtils;
import android.util.Log;
import com.fsck.k9.Account;
import com.fsck.k9.AccountStats;
import com.fsck.k9.K9;
import com.fsck.k9.NotificationSetting;
import com.fsck.k9.Preferences;
import com.fsck.k9.R;
import com.fsck.k9.SearchSpecification;
import com.fsck.k9.activity.FolderList;
import com.fsck.k9.activity.MessageList;
import com.fsck.k9.helper.Utility;
import com.fsck.k9.helper.power.TracingPowerManager;
import com.fsck.k9.helper.power.TracingPowerManager.TracingWakeLock;
import com.fsck.k9.mail.Address;
import com.fsck.k9.mail.FetchProfile;
import com.fsck.k9.mail.Flag;
import com.fsck.k9.mail.Folder;
import com.fsck.k9.mail.Folder.FolderType;
import com.fsck.k9.mail.Folder.OpenMode;
import com.fsck.k9.mail.Message;
import com.fsck.k9.mail.MessagingException;
import com.fsck.k9.mail.Part;
import com.fsck.k9.mail.PushReceiver;
import com.fsck.k9.mail.Pusher;
import com.fsck.k9.mail.Store;
import com.fsck.k9.mail.Transport;
import com.fsck.k9.mail.internet.MimeMessage;
import com.fsck.k9.mail.internet.MimeUtility;
import com.fsck.k9.mail.internet.TextBody;
import com.fsck.k9.mail.store.UnavailableAccountException;
import com.fsck.k9.mail.store.LocalStore;
import com.fsck.k9.mail.store.UnavailableStorageException;
import com.fsck.k9.mail.store.LocalStore.LocalFolder;
import com.fsck.k9.mail.store.LocalStore.LocalMessage;
import com.fsck.k9.mail.store.LocalStore.PendingCommand;
/**
* Starts a long-running (application) Thread that runs through commands
* that require remote mailbox access. This class is used to serialize and
* prioritize these commands. Each method that submits a command requires a
* MessagingListener instance to be provided. It is expected that the listener
* has also been added as a registered listener using addListener(). When a
* command is about to be executed, it is skipped if the listener that was
* provided with the command is no longer registered. The design idea behind this
* is that when an Activity starts it registers as a listener, and when it is
* paused it removes itself. Thus, any commands that the activity submitted are
* removed from the queue once the activity is no longer active.
*/
public class MessagingController implements Runnable
{
/**
* Immutable empty {@link String} array
*/
private static final String[] EMPTY_STRING_ARRAY = new String[0];
/**
* Immutable empty {@link Message} array
*/
private static final Message[] EMPTY_MESSAGE_ARRAY = new Message[0];
/**
* Immutable empty {@link Folder} array
*/
private static final Folder[] EMPTY_FOLDER_ARRAY = new Folder[0];
/**
* The maximum message size that we'll consider to be "small". A small message is downloaded
* in full immediately instead of in pieces. Anything over this size will be downloaded in
* pieces with attachments being left off completely and downloaded on demand.
*
*
* 25k for a "small" message was picked by educated trial and error.
* http://answers.google.com/answers/threadview?id=312463 claims that the
* average size of an email is 59k, which I feel is too large for our
* blind download. The following tests were performed on a download of
* 25 random messages.
* <pre>
* 5k - 61 seconds,
* 25k - 51 seconds,
* 55k - 53 seconds,
* </pre>
* So 25k gives good performance and a reasonable data footprint. Sounds good to me.
*/
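// Assumed declaration: the constant described by the comment above is not declared here,
// so the name and value below are a reconstruction based on the 25k figure given in
// that comment.
private static final int MAX_SMALL_MESSAGE_SIZE = (25 * 1024);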
private static final String PENDING_COMMAND_MOVE_OR_COPY = "com.fsck.k9.MessagingController.moveOrCopy";
private static final String PENDING_COMMAND_MOVE_OR_COPY_BULK = "com.fsck.k9.MessagingController.moveOrCopyBulk";
private static final String PENDING_COMMAND_EMPTY_TRASH = "com.fsck.k9.MessagingController.emptyTrash";
private static final String PENDING_COMMAND_SET_FLAG_BULK = "com.fsck.k9.MessagingController.setFlagBulk";
private static final String PENDING_COMMAND_SET_FLAG = "com.fsck.k9.MessagingController.setFlag";
private static final String PENDING_COMMAND_APPEND = "com.fsck.k9.MessagingController.append";
private static final String PENDING_COMMAND_MARK_ALL_AS_READ = "com.fsck.k9.MessagingController.markAllAsRead";
private static final String PENDING_COMMAND_EXPUNGE = "com.fsck.k9.MessagingController.expunge";
private static MessagingController inst = null;
private BlockingQueue<Command> mCommands = new PriorityBlockingQueue<Command>();
private Thread mThread;
private Set<MessagingListener> mListeners = new CopyOnWriteArraySet<MessagingListener>();
private HashMap<SORT_TYPE, Boolean> sortAscending = new HashMap<SORT_TYPE, Boolean>();
private ConcurrentHashMap<String, AtomicInteger> sendCount = new ConcurrentHashMap<String, AtomicInteger>();
ConcurrentHashMap<Account, Pusher> pushers = new ConcurrentHashMap<Account, Pusher>();
private final ExecutorService threadPool = Executors.newCachedThreadPool();
public enum SORT_TYPE
{
SORT_DATE(R.string.sort_earliest_first, R.string.sort_latest_first, false),
SORT_SUBJECT(R.string.sort_subject_alpha, R.string.sort_subject_re_alpha, true),
SORT_SENDER(R.string.sort_sender_alpha, R.string.sort_sender_re_alpha, true),
SORT_UNREAD(R.string.sort_unread_first, R.string.sort_unread_last, true),
SORT_FLAGGED(R.string.sort_flagged_first, R.string.sort_flagged_last, true),
SORT_ATTACHMENT(R.string.sort_attach_first, R.string.sort_unattached_first, true);
private int ascendingToast;
private int descendingToast;
private boolean defaultAscending;
SORT_TYPE(int ascending, int descending, boolean ndefaultAscending)
{
ascendingToast = ascending;
descendingToast = descending;
defaultAscending = ndefaultAscending;
}
public int getToast(boolean ascending)
{
if (ascending)
{
return ascendingToast;
}
else
{
return descendingToast;
}
}
public boolean isDefaultAscending()
{
return defaultAscending;
}
};
private SORT_TYPE sortType = SORT_TYPE.SORT_DATE;
private MessagingListener checkMailListener = null;
private MemorizingListener memorizingListener = new MemorizingListener();
private boolean mBusy;
private Application mApplication;
// Key is accountUuid:folderName:messageUid , value is unimportant
private ConcurrentHashMap<String, String> deletedUids = new ConcurrentHashMap<String, String>();
private String createMessageKey(Account account, String folder, Message message)
{
return createMessageKey(account, folder, message.getUid());
}
private String createMessageKey(Account account, String folder, String uid)
{
return account.getUuid() + ":" + folder + ":" + uid;
}
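// Example with illustrative values: createMessageKey(account, "INBOX", "12345") yields
// "<accountUuid>:INBOX:12345", matching the key format described for deletedUids above.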
private void suppressMessage(Account account, String folder, Message message)
{
if (account == null || folder == null || message == null)
{
return;
}
String messKey = createMessageKey(account, folder, message);
deletedUids.put(messKey, "true");
}
private void unsuppressMessage(Account account, String folder, String uid)
{
if (account == null || folder == null || uid == null)
{
return;
}
String messKey = createMessageKey(account, folder, uid);
deletedUids.remove(messKey);
}
private boolean isMessageSuppressed(Account account, String folder, Message message)
{
if (account == null || folder == null || message == null)
{
return false;
}
String messKey = createMessageKey(account, folder, message);
if (deletedUids.containsKey(messKey))
{
return true;
}
return false;
}
private MessagingController(Application application)
{
mApplication = application;
mThread = new Thread(this);
mThread.start();
if (memorizingListener != null)
{
addListener(memorizingListener);
}
}
/**
* Gets or creates the singleton instance of MessagingController. Application is used to
* provide a Context to classes that need it.
* @param application the Application used to provide a Context
* @return the singleton MessagingController instance
*/
public synchronized static MessagingController getInstance(Application application)
{
if (inst == null)
{
inst = new MessagingController(application);
}
return inst;
}
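// Illustrative lifecycle sketch based on the class comment above (the method names are
// examples, not taken from this file): an Activity would typically call
// MessagingController.getInstance(getApplication()).addListener(listener) when it starts
// (e.g. in onResume()) and removeListener(listener) when it is paused, so that commands
// it submitted are skipped once it is no longer active.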
public boolean isBusy()
{
return mBusy;
}
public void run()
{
Process.setThreadPriority(Process.THREAD_PRIORITY_BACKGROUND);
while (true)
{
String commandDescription = null;
try
{
final Command command = mCommands.take();
if (command != null)
{
commandDescription = command.description;
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Running " + (command.isForeground ? "Foreground" : "Background") + " command '" + command.description + "', seq = " + command.sequence);
mBusy = true;
try
{
command.runnable.run();
}
catch (UnavailableAccountException e)
{
// retry later
new Thread()
{
@Override
public void run()
{
try
{
sleep(30 * 1000);
mCommands.put(command);
}
catch (InterruptedException e)
{
Log.e(K9.LOG_TAG, "interrupted while putting a pending command for"
+ " an unavailable account back into the queue."
+ " THIS SHOULD NEVER HAPPEN.");
}
}
}.start();
}
if (K9.DEBUG)
Log.i(K9.LOG_TAG, (command.isForeground ? "Foreground" : "Background") +
" Command '" + command.description + "' completed");
for (MessagingListener l : getListeners(command.listener))
{
l.controllerCommandCompleted(mCommands.size() > 0);
}
}
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Error running command '" + commandDescription + "'", e);
}
mBusy = false;
}
}
private void put(String description, MessagingListener listener, Runnable runnable)
{
putCommand(mCommands, description, listener, runnable, true);
}
private void putBackground(String description, MessagingListener listener, Runnable runnable)
{
putCommand(mCommands, description, listener, runnable, false);
}
private void putCommand(BlockingQueue<Command> queue, String description, MessagingListener listener, Runnable runnable, boolean isForeground)
{
int retries = 10;
Exception e = null;
while (retries-- > 0)
{
try
{
Command command = new Command();
command.listener = listener;
command.runnable = runnable;
command.description = description;
command.isForeground = isForeground;
queue.put(command);
return;
}
catch (InterruptedException ie)
{
try
{
Thread.sleep(200);
}
catch (InterruptedException ne)
{
}
e = ie;
}
}
throw new Error(e);
}
public void addListener(MessagingListener listener)
{
mListeners.add(listener);
refreshListener(listener);
}
public void refreshListener(MessagingListener listener)
{
if (memorizingListener != null && listener != null)
{
memorizingListener.refreshOther(listener);
}
}
public void removeListener(MessagingListener listener)
{
mListeners.remove(listener);
}
public Set<MessagingListener> getListeners()
{
return mListeners;
}
public Set<MessagingListener> getListeners(MessagingListener listener)
{
if (listener == null)
{
return mListeners;
}
Set<MessagingListener> listeners = new HashSet<MessagingListener>(mListeners);
listeners.add(listener);
return listeners;
}
/**
* Lists folders that are available locally and remotely. The work is handed off to a
* background thread, which calls listFoldersCallback for local folders first and then
* for remote folders at some later point. If there are no local folders a remote
* refresh is forced. See listFoldersSynchronous for the blocking variant.
* TODO this needs to cache the remote folder list
*
* @param account
* @param refreshRemote
* @param listener
* @throws MessagingException
*/
public void listFolders(final Account account, final boolean refreshRemote, final MessagingListener listener)
{
threadPool.execute(new Runnable()
{
public void run()
{
listFoldersSynchronous(account, refreshRemote, listener);
}
});
}
/**
* Lists folders that are available locally and remotely. This method calls
* listFoldersCallback for local folders before it returns, and then for
* remote folders at some later point. If there are no local folders
* a remote refresh is forced by this method. This method runs on the calling
* thread and may take several seconds to list the local folders.
* TODO this needs to cache the remote folder list
*
* @param account
* @param refreshRemote
* @param listener
* @throws MessagingException
*/
public void listFoldersSynchronous(final Account account, final boolean refreshRemote, final MessagingListener listener)
{
for (MessagingListener l : getListeners(listener))
{
l.listFoldersStarted(account);
}
List<? extends Folder> localFolders = null;
if (!account.isAvailable(mApplication))
{
Log.i(K9.LOG_TAG, "not listing folders of unavailable account");
}
else
{
try
{
Store localStore = account.getLocalStore();
localFolders = localStore.getPersonalNamespaces(false);
if (refreshRemote || localFolders == null || localFolders.size() == 0)
{
doRefreshRemote(account, listener);
return;
}
// Only build the folder array after the null/empty check above, so the check can
// actually take effect before the list is dereferenced.
Folder[] folderArray = localFolders.toArray(EMPTY_FOLDER_ARRAY);
for (MessagingListener l : getListeners(listener))
{
l.listFolders(account, folderArray);
}
}
catch (Exception e)
{
for (MessagingListener l : getListeners(listener))
{
l.listFoldersFailed(account, e.getMessage());
}
addErrorMessage(account, null, e);
return;
}
finally
{
if (localFolders != null)
{
for (Folder localFolder : localFolders)
{
if (localFolder != null)
{
localFolder.close();
}
}
}
}
}
for (MessagingListener l : getListeners(listener))
{
l.listFoldersFinished(account);
}
}
private void doRefreshRemote(final Account account, MessagingListener listener)
{
put("doRefreshRemote", listener, new Runnable()
{
public void run()
{
List<? extends Folder> localFolders = null;
try
{
Store store = account.getRemoteStore();
List<? extends Folder> remoteFolders = store.getPersonalNamespaces(false);
LocalStore localStore = account.getLocalStore();
HashSet<String> remoteFolderNames = new HashSet<String>();
for (int i = 0, count = remoteFolders.size(); i < count; i++)
{
LocalFolder localFolder = localStore.getFolder(remoteFolders.get(i).getName());
if (!localFolder.exists())
{
localFolder.create(FolderType.HOLDS_MESSAGES, account.getDisplayCount());
}
remoteFolderNames.add(remoteFolders.get(i).getName());
}
localFolders = localStore.getPersonalNamespaces(false);
/*
* Clear out any folders that are no longer on the remote store.
*/
for (Folder localFolder : localFolders)
{
String localFolderName = localFolder.getName();
if (localFolderName.equalsIgnoreCase(K9.INBOX) ||
localFolderName.equals(account.getTrashFolderName()) ||
localFolderName.equals(account.getOutboxFolderName()) ||
localFolderName.equals(account.getDraftsFolderName()) ||
localFolderName.equals(account.getSentFolderName()) ||
localFolderName.equals(account.getErrorFolderName()))
{
continue;
}
if (!remoteFolderNames.contains(localFolder.getName()))
{
localFolder.delete(false);
}
}
localFolders = localStore.getPersonalNamespaces(false);
Folder[] folderArray = localFolders.toArray(EMPTY_FOLDER_ARRAY);
for (MessagingListener l : getListeners())
{
l.listFolders(account, folderArray);
}
for (MessagingListener l : getListeners())
{
l.listFoldersFinished(account);
}
}
catch (Exception e)
{
for (MessagingListener l : getListeners())
{
l.listFoldersFailed(account, "");
}
addErrorMessage(account, null, e);
}
finally
{
if (localFolders != null)
{
for (Folder localFolder : localFolders)
{
if (localFolder != null)
{
localFolder.close();
}
}
}
}
}
});
}
/**
* List the messages in the local message store for the given folder asynchronously.
*
* @param account
* @param folder
* @param listener
* @throws MessagingException
*/
public void listLocalMessages(final Account account, final String folder, final MessagingListener listener)
{
threadPool.execute(new Runnable()
{
public void run()
{
listLocalMessagesSynchronous(account, folder, listener);
}
});
}
/**
* List the messages in the local message store for the given folder synchronously.
*
* @param account
* @param folder
* @param listener
* @throws MessagingException
*/
public void listLocalMessagesSynchronous(final Account account, final String folder, final MessagingListener listener)
{
for (MessagingListener l : getListeners(listener))
{
l.listLocalMessagesStarted(account, folder);
}
Folder localFolder = null;
MessageRetrievalListener retrievalListener =
new MessageRetrievalListener()
{
List<Message> pendingMessages = new ArrayList<Message>();
int totalDone = 0;
public void messageStarted(String message, int number, int ofTotal) {}
public void messageFinished(Message message, int number, int ofTotal)
{
if (!isMessageSuppressed(account, folder, message))
{
pendingMessages.add(message);
totalDone++;
if (pendingMessages.size() > 10)
{
addPendingMessages();
}
}
else
{
for (MessagingListener l : getListeners(listener))
{
l.listLocalMessagesRemoveMessage(account, folder, message);
}
}
}
public void messagesFinished(int number)
{
addPendingMessages();
}
private void addPendingMessages()
{
for (MessagingListener l : getListeners(listener))
{
l.listLocalMessagesAddMessages(account, folder, pendingMessages);
}
pendingMessages.clear();
}
};
try
{
Store localStore = account.getLocalStore();
localFolder = localStore.getFolder(folder);
localFolder.open(OpenMode.READ_WRITE);
localFolder.getMessages(
retrievalListener,
false // Skip deleted messages
);
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Got ack that callbackRunner finished");
for (MessagingListener l : getListeners(listener))
{
l.listLocalMessagesFinished(account, folder);
}
}
catch (Exception e)
{
for (MessagingListener l : getListeners(listener))
{
l.listLocalMessagesFailed(account, folder, e.getMessage());
}
addErrorMessage(account, null, e);
}
finally
{
if (localFolder != null)
{
localFolder.close();
}
}
}
public void searchLocalMessages(SearchSpecification searchSpecification, final Message[] messages, final MessagingListener listener)
{
searchLocalMessages(searchSpecification.getAccountUuids(), searchSpecification.getFolderNames(), messages,
searchSpecification.getQuery(), searchSpecification.isIntegrate(), searchSpecification.getRequiredFlags(), searchSpecification.getForbiddenFlags(), listener);
}
/**
* Find all messages in any local account which match the query 'query'.
*
* @param accountUuids
* @param folderNames
* @param messages
* @param query
* @param integrate
* @param requiredFlags
* @param forbiddenFlags
* @param listener
* @throws MessagingException
*/
public void searchLocalMessages(final String[] accountUuids, final String[] folderNames, final Message[] messages, final String query, final boolean integrate,
final Flag[] requiredFlags, final Flag[] forbiddenFlags, final MessagingListener listener)
{
if (K9.DEBUG)
{
Log.i(K9.LOG_TAG, "searchLocalMessages ("
+ "accountUuids=" + Utility.combine(accountUuids, ',')
+ ", folderNames = " + Utility.combine(folderNames, ',')
+ ", messages.size() = " + (messages != null ? messages.length : null)
+ ", query = " + query
+ ", integrate = " + integrate
+ ", requiredFlags = " + Utility.combine(requiredFlags, ',')
+ ", forbiddenFlags = " + Utility.combine(forbiddenFlags, ',')
+ ")");
}
threadPool.execute(new Runnable()
{
public void run()
{
searchLocalMessagesSynchronous(accountUuids,folderNames, messages, query, integrate, requiredFlags, forbiddenFlags, listener);
}
});
}
public void searchLocalMessagesSynchronous(final String[] accountUuids, final String[] folderNames, final Message[] messages, final String query, final boolean integrate, final Flag[] requiredFlags, final Flag[] forbiddenFlags, final MessagingListener listener)
{
final AccountStats stats = new AccountStats();
final Set<String> accountUuidsSet = new HashSet<String>();
if (accountUuids != null)
{
for (String accountUuid : accountUuids)
{
accountUuidsSet.add(accountUuid);
}
}
final Preferences prefs = Preferences.getPreferences(mApplication.getApplicationContext());
Account[] accounts = prefs.getAccounts();
List<LocalFolder> foldersToSearch = null;
boolean displayableOnly = false;
boolean noSpecialFolders = true;
for (final Account account : accounts)
{
if (!account.isAvailable(mApplication))
{
Log.d(K9.LOG_TAG, "searchLocalMessagesSynchronous() ignores account that is not available");
continue;
}
if (accountUuids != null && !accountUuidsSet.contains(account.getUuid()))
{
continue;
}
if (accountUuids != null && accountUuidsSet.contains(account.getUuid()))
{
displayableOnly = true;
noSpecialFolders = true;
}
else if (!integrate && folderNames == null)
{
Account.Searchable searchableFolders = account.getSearchableFolders();
switch (searchableFolders)
{
case NONE:
continue;
case DISPLAYABLE:
displayableOnly = true;
break;
}
}
List<Message> messagesToSearch = null;
if (messages != null)
{
messagesToSearch = new LinkedList<Message>();
for (Message message : messages)
{
if (message.getFolder().getAccount().getUuid().equals(account.getUuid()))
{
messagesToSearch.add(message);
}
}
if (messagesToSearch.isEmpty())
{
continue;
}
}
if (listener != null)
{
listener.listLocalMessagesStarted(account, null);
}
if (integrate || displayableOnly || folderNames != null || noSpecialFolders)
{
List<LocalFolder> tmpFoldersToSearch = new LinkedList<LocalFolder>();
try
{
LocalStore store = account.getLocalStore();
List<? extends Folder> folders = store.getPersonalNamespaces(false);
Set<String> folderNameSet = null;
if (folderNames != null)
{
folderNameSet = new HashSet<String>();
for (String folderName : folderNames)
{
folderNameSet.add(folderName);
}
}
for (Folder folder : folders)
{
LocalFolder localFolder = (LocalFolder)folder;
boolean include = true;
folder.refresh(prefs);
String localFolderName = localFolder.getName();
if (integrate)
{
include = localFolder.isIntegrate();
}
else
{
if (folderNameSet != null)
{
if (!folderNameSet.contains(localFolderName))
{
include = false;
}
}
// Never exclude the INBOX (see issue 1817)
else if (noSpecialFolders && !localFolderName.equals(K9.INBOX) && (
localFolderName.equals(account.getTrashFolderName()) ||
localFolderName.equals(account.getOutboxFolderName()) ||
localFolderName.equals(account.getDraftsFolderName()) ||
localFolderName.equals(account.getSentFolderName()) ||
localFolderName.equals(account.getErrorFolderName())))
{
include = false;
}
else if (displayableOnly && modeMismatch(account.getFolderDisplayMode(), folder.getDisplayClass()))
{
include = false;
}
}
if (include)
{
tmpFoldersToSearch.add(localFolder);
}
}
if (tmpFoldersToSearch.size() < 1)
{
continue;
}
foldersToSearch = tmpFoldersToSearch;
}
catch (MessagingException me)
{
Log.e(K9.LOG_TAG, "Unable to restrict search folders in Account " + account.getDescription() + ", searching all", me);
addErrorMessage(account, null, me);
}
}
MessageRetrievalListener retrievalListener = new MessageRetrievalListener()
{
public void messageStarted(String message, int number, int ofTotal) {}
public void messageFinished(Message message, int number, int ofTotal)
{
if (!isMessageSuppressed(message.getFolder().getAccount(), message.getFolder().getName(), message))
{
List<Message> messages = new ArrayList<Message>();
messages.add(message);
stats.unreadMessageCount += (!message.isSet(Flag.SEEN)) ? 1 : 0;
stats.flaggedMessageCount += (message.isSet(Flag.FLAGGED)) ? 1 : 0;
if (listener != null)
{
listener.listLocalMessagesAddMessages(account, null, messages);
}
}
}
public void messagesFinished(int number)
{
}
};
try
{
String[] queryFields = {"html_content","subject","sender_list"};
LocalStore localStore = account.getLocalStore();
localStore.searchForMessages(retrievalListener, queryFields
, query, foldersToSearch,
messagesToSearch == null ? null : messagesToSearch.toArray(EMPTY_MESSAGE_ARRAY),
requiredFlags, forbiddenFlags);
}
catch (Exception e)
{
if (listener != null)
{
listener.listLocalMessagesFailed(account, null, e.getMessage());
}
addErrorMessage(account, null, e);
}
finally
{
if (listener != null)
{
listener.listLocalMessagesFinished(account, null);
}
}
}
if (listener != null)
{
listener.searchStats(stats);
}
}
public void loadMoreMessages(Account account, String folder, MessagingListener listener)
{
try
{
LocalStore localStore = account.getLocalStore();
LocalFolder localFolder = localStore.getFolder(folder);
if (localFolder.getVisibleLimit() > 0 )
{
localFolder.setVisibleLimit(localFolder.getVisibleLimit() + account.getDisplayCount());
}
synchronizeMailbox(account, folder, listener, null);
}
catch (MessagingException me)
{
addErrorMessage(account, null, me);
throw new RuntimeException("Unable to set visible limit on folder", me);
}
}
public void resetVisibleLimits(Collection<Account> accounts)
{
for (Account account : accounts)
{
account.resetVisibleLimits();
}
}
/**
* Start background synchronization of the specified folder.
* @param account
* @param folder
* @param listener
* @param providedRemoteFolder TODO
*/
public void synchronizeMailbox(final Account account, final String folder, final MessagingListener listener, final Folder providedRemoteFolder)
{
putBackground("synchronizeMailbox", listener, new Runnable()
{
public void run()
{
synchronizeMailboxSynchronous(account, folder, listener, providedRemoteFolder);
}
});
}
/**
* Start foreground synchronization of the specified folder. This is generally only called
* by synchronizeMailbox.
* @param account
* @param folder
*
* TODO Break this method up into smaller chunks.
* @param providedRemoteFolder TODO
*/
private void synchronizeMailboxSynchronous(final Account account, final String folder, final MessagingListener listener, Folder providedRemoteFolder)
{
Folder remoteFolder = null;
LocalFolder tLocalFolder = null;
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Synchronizing folder " + account.getDescription() + ":" + folder);
for (MessagingListener l : getListeners(listener))
{
l.synchronizeMailboxStarted(account, folder);
}
/*
* We don't ever sync the Outbox or errors folder
*/
if (folder.equals(account.getOutboxFolderName()) || folder.equals(account.getErrorFolderName()))
{
for (MessagingListener l : getListeners(listener))
{
l.synchronizeMailboxFinished(account, folder, 0, 0);
}
return;
}
Exception commandException = null;
try
{
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "SYNC: About to process pending commands for account " +
account.getDescription());
try
{
processPendingCommandsSynchronous(account);
}
catch (Exception e)
{
addErrorMessage(account, null, e);
Log.e(K9.LOG_TAG, "Failure processing command, but allow message sync attempt", e);
commandException = e;
}
/*
* Get the message list from the local store and create an index of
* the uids within the list.
*/
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "SYNC: About to get local folder " + folder);
final LocalStore localStore = account.getLocalStore();
tLocalFolder = localStore.getFolder(folder);
final LocalFolder localFolder = tLocalFolder;
localFolder.open(OpenMode.READ_WRITE);
Message[] localMessages = localFolder.getMessages(null);
HashMap<String, Message> localUidMap = new HashMap<String, Message>();
for (Message message : localMessages)
{
localUidMap.put(message.getUid(), message);
}
if (providedRemoteFolder != null)
{
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "SYNC: using providedRemoteFolder " + folder);
remoteFolder = providedRemoteFolder;
}
else
{
Store remoteStore = account.getRemoteStore();
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "SYNC: About to get remote folder " + folder);
remoteFolder = remoteStore.getFolder(folder);
if (! verifyOrCreateRemoteSpecialFolder(account, folder, remoteFolder, listener))
{
return;
}
/*
* Synchronization process:
Open the folder
Upload any local messages that are marked as PENDING_UPLOAD (Drafts, Sent, Trash)
Get the message count
Get the list of the newest K9.DEFAULT_VISIBLE_LIMIT messages
getMessages(messageCount - K9.DEFAULT_VISIBLE_LIMIT, messageCount)
See if we have each message locally, if not fetch it's flags and envelope
Get and update the unread count for the folder
Update the remote flags of any messages we have locally with an internal date
newer than the remote message.
Get the current flags for any messages we have locally but did not just download
Update local flags
For any message we have locally but not remotely, delete the local message to keep
cache clean.
Download larger parts of any new messages.
(Optional) Download small attachments in the background.
*/
/*
* Open the remote folder. This pre-loads certain metadata like message count.
*/
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "SYNC: About to open remote folder " + folder);
remoteFolder.open(OpenMode.READ_WRITE);
if (Account.EXPUNGE_ON_POLL.equals(account.getExpungePolicy()))
{
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "SYNC: Expunging folder " + account.getDescription() + ":" + folder);
remoteFolder.expunge();
}
}
/*
* Get the remote message count.
*/
int remoteMessageCount = remoteFolder.getMessageCount();
int visibleLimit = localFolder.getVisibleLimit();
if (visibleLimit < 0)
{
visibleLimit = K9.DEFAULT_VISIBLE_LIMIT;
}
Message[] remoteMessageArray = EMPTY_MESSAGE_ARRAY;
final ArrayList<Message> remoteMessages = new ArrayList<Message>();
// final ArrayList<Message> unsyncedMessages = new ArrayList<Message>();
HashMap<String, Message> remoteUidMap = new HashMap<String, Message>();
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "SYNC: Remote message count for folder " + folder + " is " + remoteMessageCount);
final Date earliestDate = account.getEarliestPollDate();
if (remoteMessageCount > 0)
{
/*
* Message numbers start at 1.
*/
int remoteStart;
if (visibleLimit > 0 )
{
remoteStart = Math.max(0, remoteMessageCount - visibleLimit) + 1;
}
else
{
remoteStart = 1;
}
int remoteEnd = remoteMessageCount;
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "SYNC: About to get messages " + remoteStart + " through " + remoteEnd + " for folder " + folder);
final AtomicInteger headerProgress = new AtomicInteger(0);
for (MessagingListener l : getListeners(listener))
{
l.synchronizeMailboxHeadersStarted(account, folder);
}
remoteMessageArray = remoteFolder.getMessages(remoteStart, remoteEnd, earliestDate, null);
int messageCount = remoteMessageArray.length;
for (Message thisMess : remoteMessageArray)
{
headerProgress.incrementAndGet();
for (MessagingListener l : getListeners(listener))
{
l.synchronizeMailboxHeadersProgress(account, folder, headerProgress.get(), messageCount);
}
Message localMessage = localUidMap.get(thisMess.getUid());
if (localMessage == null || !localMessage.olderThan(earliestDate))
{
remoteMessages.add(thisMess);
remoteUidMap.put(thisMess.getUid(), thisMess);
}
}
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "SYNC: Got " + remoteUidMap.size() + " messages for folder " + folder);
remoteMessageArray = null;
for (MessagingListener l : getListeners(listener))
{
l.synchronizeMailboxHeadersFinished(account, folder, headerProgress.get(), remoteUidMap.size());
}
}
else if (remoteMessageCount < 0)
{
throw new Exception("Message count " + remoteMessageCount + " for folder " + folder);
}
/*
* Remove any messages that are in the local store but no longer on the remote store or are too old
*/
if (account.syncRemoteDeletions())
{
for (Message localMessage : localMessages)
{
if (remoteUidMap.get(localMessage.getUid()) == null)
{
localMessage.destroy();
for (MessagingListener l : getListeners(listener))
{
l.synchronizeMailboxRemovedMessage(account, folder, localMessage);
}
}
}
}
localMessages = null;
/*
* Now we download the actual content of messages.
*/
int newMessages = downloadMessages(account, remoteFolder, localFolder, remoteMessages, false);
int unreadMessageCount = setLocalUnreadCountToRemote(localFolder, remoteFolder, newMessages);
setLocalFlaggedCountToRemote(localFolder, remoteFolder);
for (MessagingListener l : getListeners())
{
l.folderStatusChanged(account, folder, unreadMessageCount);
}
/*
* Notify listeners that we're finally done.
*/
localFolder.setLastChecked(System.currentTimeMillis());
localFolder.setStatus(null);
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "Done synchronizing folder " +
account.getDescription() + ":" + folder + " @ " + new Date() +
" with " + newMessages + " new messages");
for (MessagingListener l : getListeners(listener))
{
l.synchronizeMailboxFinished(account, folder, remoteMessageCount, newMessages);
}
if (commandException != null)
{
String rootMessage = getRootCauseMessage(commandException);
Log.e(K9.LOG_TAG, "Root cause failure in " + account.getDescription() + ":" +
tLocalFolder.getName() + " was '" + rootMessage + "'");
localFolder.setStatus(rootMessage);
for (MessagingListener l : getListeners(listener))
{
l.synchronizeMailboxFailed(account, folder, rootMessage);
}
}
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Done synchronizing folder " + account.getDescription() + ":" + folder);
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "synchronizeMailbox", e);
// If we don't set the last checked, it can try too often during
// failure conditions
String rootMessage = getRootCauseMessage(e);
if (tLocalFolder != null)
{
try
{
tLocalFolder.setStatus(rootMessage);
tLocalFolder.setLastChecked(System.currentTimeMillis());
}
catch (MessagingException me)
{
Log.e(K9.LOG_TAG, "Could not set last checked on folder " + account.getDescription() + ":" +
tLocalFolder.getName(), e);
}
}
for (MessagingListener l : getListeners(listener))
{
l.synchronizeMailboxFailed(
account,
folder,
rootMessage);
}
addErrorMessage(account, null, e);
Log.e(K9.LOG_TAG, "Failed synchronizing folder " +
account.getDescription() + ":" + folder + " @ " + new Date());
}
finally
{
if (providedRemoteFolder == null && remoteFolder != null)
{
remoteFolder.close();
}
if (tLocalFolder != null)
{
tLocalFolder.close();
}
}
}
/*
* If the folder is a "special" folder we need to see if it exists
* on the remote server. If it does not exist we'll try to create it. If we
* can't create it, we'll abort. This will happen on every single Pop3 folder as
* designed and on Imap folders during error conditions. This allows us
* to treat Pop3 and Imap the same in this code.
*/
private boolean verifyOrCreateRemoteSpecialFolder(final Account account, final String folder, final Folder remoteFolder, final MessagingListener listener) throws MessagingException
{
if (folder.equals(account.getTrashFolderName()) ||
folder.equals(account.getSentFolderName()) ||
folder.equals(account.getDraftsFolderName()))
{
if (!remoteFolder.exists())
{
if (!remoteFolder.create(FolderType.HOLDS_MESSAGES))
{
for (MessagingListener l : getListeners(listener))
{
l.synchronizeMailboxFinished(account, folder, 0, 0);
}
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Done synchronizing folder " + folder);
return false;
}
}
}
return true;
}
private int setLocalUnreadCountToRemote(LocalFolder localFolder, Folder remoteFolder, int newMessageCount) throws MessagingException
{
int remoteUnreadMessageCount = remoteFolder.getUnreadMessageCount();
if (remoteUnreadMessageCount != -1)
{
localFolder.setUnreadMessageCount(remoteUnreadMessageCount);
}
else
{
int unreadCount = 0;
Message[] messages = localFolder.getMessages(null, false);
for (Message message : messages)
{
if (!message.isSet(Flag.SEEN) && !message.isSet(Flag.DELETED))
{
unreadCount++;
}
}
localFolder.setUnreadMessageCount(unreadCount);
}
return localFolder.getUnreadMessageCount();
}
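/*
* Mirror the remote flagged-message count into the local folder, falling back to
* counting flagged, undeleted local messages when the remote store reports -1.
*/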
private void setLocalFlaggedCountToRemote(LocalFolder localFolder, Folder remoteFolder) throws MessagingException
{
int remoteFlaggedMessageCount = remoteFolder.getFlaggedMessageCount();
if (remoteFlaggedMessageCount != -1)
{
localFolder.setFlaggedMessageCount(remoteFlaggedMessageCount);
}
else
{
int flaggedCount = 0;
Message[] messages = localFolder.getMessages(null, false);
for (Message message : messages)
{
if (message.isSet(Flag.FLAGGED) && !message.isSet(Flag.DELETED))
{
flaggedCount++;
}
}
localFolder.setFlaggedMessageCount(flaggedCount);
}
}
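/*
* Core of the folder sync: partitions the remote messages into unsynced messages,
* then small and large messages, downloads them in that order, refreshes the flags
* of the remaining messages and finally purges the local folder down to its visible
* limit. Returns the number of new messages that triggered a notification.
*/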
private int downloadMessages(final Account account, final Folder remoteFolder,
final LocalFolder localFolder, List<Message> inputMessages, boolean flagSyncOnly) throws MessagingException
{
final Date earliestDate = account.getEarliestPollDate();
if (earliestDate != null)
{
if (K9.DEBUG)
{
Log.d(K9.LOG_TAG, "Only syncing messages after " + earliestDate);
}
}
final String folder = remoteFolder.getName();
int unreadBeforeStart = 0;
try
{
AccountStats stats = account.getStats(mApplication);
unreadBeforeStart = stats.unreadMessageCount;
}
catch (MessagingException e)
{
Log.e(K9.LOG_TAG, "Unable to getUnreadMessageCount for account: " + account, e);
}
ArrayList<Message> syncFlagMessages = new ArrayList<Message>();
List<Message> unsyncedMessages = new ArrayList<Message>();
final AtomicInteger newMessages = new AtomicInteger(0);
List<Message> messages = new ArrayList<Message>(inputMessages);
for (Message message : messages)
{
if (message.isSet(Flag.DELETED))
{
syncFlagMessages.add(message);
}
else if (!isMessageSuppressed(account, folder, message))
{
Message localMessage = localFolder.getMessage(message.getUid());
if (localMessage == null)
{
if (!flagSyncOnly)
{
if (!message.isSet(Flag.X_DOWNLOADED_FULL) && !message.isSet(Flag.X_DOWNLOADED_PARTIAL))
{
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Message with uid " + message.getUid() + " has not yet been downloaded");
unsyncedMessages.add(message);
}
else
{
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Message with uid " + message.getUid() + " is partially or fully downloaded");
// Store the updated message locally
localFolder.appendMessages(new Message[] { message });
localMessage = localFolder.getMessage(message.getUid());
localMessage.setFlag(Flag.X_DOWNLOADED_FULL, message.isSet(Flag.X_DOWNLOADED_FULL));
localMessage.setFlag(Flag.X_DOWNLOADED_PARTIAL, message.isSet(Flag.X_DOWNLOADED_PARTIAL));
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxAddOrUpdateMessage(account, folder, localMessage);
if (!localMessage.isSet(Flag.SEEN))
{
l.synchronizeMailboxNewMessage(account, folder, localMessage);
}
}
}
}
}
else if (!localMessage.isSet(Flag.DELETED))
{
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Message with uid " + message.getUid() + " is present in the local store");
if (!localMessage.isSet(Flag.X_DOWNLOADED_FULL) && !localMessage.isSet(Flag.X_DOWNLOADED_PARTIAL))
{
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Message with uid " + message.getUid()
+ " is not downloaded, even partially; trying again");
unsyncedMessages.add(message);
}
else
{
String newPushState = remoteFolder.getNewPushState(localFolder.getPushState(), message);
if (newPushState != null)
{
localFolder.setPushState(newPushState);
}
syncFlagMessages.add(message);
}
}
}
}
final AtomicInteger progress = new AtomicInteger(0);
final int todo = unsyncedMessages.size() + syncFlagMessages.size();
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxProgress(account, folder, progress.get(), todo);
}
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "SYNC: Have " + unsyncedMessages.size() + " unsynced messages");
messages.clear();
final ArrayList<Message> largeMessages = new ArrayList<Message>();
final ArrayList<Message> smallMessages = new ArrayList<Message>();
if (unsyncedMessages.size() > 0)
{
/*
* Reverse the order of the messages. Depending on the server this may get us
* fetch results for newest to oldest. If not, no harm done.
*/
Collections.reverse(unsyncedMessages);
int visibleLimit = localFolder.getVisibleLimit();
int listSize = unsyncedMessages.size();
if ((visibleLimit > 0) && (listSize > visibleLimit))
{
unsyncedMessages = unsyncedMessages.subList(listSize - visibleLimit, listSize);
}
FetchProfile fp = new FetchProfile();
if (remoteFolder.supportsFetchingFlags())
{
fp.add(FetchProfile.Item.FLAGS);
}
fp.add(FetchProfile.Item.ENVELOPE);
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "SYNC: About to fetch " + unsyncedMessages.size() + " unsynced messages for folder " + folder);
fetchUnsyncedMessages(account, remoteFolder, localFolder, unsyncedMessages, smallMessages,largeMessages, progress, todo, fp);
// If a message didn't exist, messageFinished won't be called, but we shouldn't try again
// If we got here, nothing failed
for (Message message : unsyncedMessages)
{
String newPushState = remoteFolder.getNewPushState(localFolder.getPushState(), message);
if (newPushState != null)
{
localFolder.setPushState(newPushState);
}
}
if (K9.DEBUG)
{
Log.d(K9.LOG_TAG, "SYNC: Synced unsynced messages for folder " + folder);
}
}
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "SYNC: Have "
+ largeMessages.size() + " large messages and "
+ smallMessages.size() + " small messages out of "
+ unsyncedMessages.size() + " unsynced messages");
unsyncedMessages.clear();
/*
* Grab the content of the small messages first. This is going to
* be very fast and at very worst will be a single upload of a few bytes and a single
* download of 625k.
*/
FetchProfile fp = new FetchProfile();
fp.add(FetchProfile.Item.BODY);
// fp.add(FetchProfile.Item.FLAGS);
// fp.add(FetchProfile.Item.ENVELOPE);
downloadSmallMessages(account, remoteFolder, localFolder, smallMessages, progress, unreadBeforeStart, newMessages, todo, fp);
smallMessages.clear();
/*
* Now do the large messages that require more round trips.
*/
fp.clear();
fp.add(FetchProfile.Item.STRUCTURE);
downloadLargeMessages(account, remoteFolder, localFolder, largeMessages, progress, unreadBeforeStart, newMessages, todo, fp);
largeMessages.clear();
/*
* Refresh the flags for any messages in the local store that we didn't just
* download.
*/
refreshLocalMessageFlags(account,remoteFolder,localFolder,syncFlagMessages,progress,todo);
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "SYNC: Synced remote messages for folder " + folder + ", " + newMessages.get() + " new messages");
localFolder.purgeToVisibleLimit(new MessageRemovalListener()
{
public void messageRemoved(Message message)
{
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxRemovedMessage(account, folder, message);
}
}
});
return newMessages.get();
}
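/*
* Fetch envelope data (and flags, where the store supports it) for the unsynced
* messages and sort them into the smallMessages / largeMessages lists based on the
* account's maximum auto-download size. Deleted or too-old messages are skipped.
*/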
private void fetchUnsyncedMessages(final Account account, final Folder remoteFolder,
final LocalFolder localFolder,
List<Message> unsyncedMessages,
final ArrayList<Message> smallMessages,
final ArrayList<Message> largeMessages,
final AtomicInteger progress,
final int todo,
FetchProfile fp) throws MessagingException
{
final String folder = remoteFolder.getName();
final Date earliestDate = account.getEarliestPollDate();
remoteFolder.fetch(unsyncedMessages.toArray(EMPTY_MESSAGE_ARRAY), fp,
new MessageRetrievalListener()
{
public void messageFinished(Message message, int number, int ofTotal)
{
try
{
String newPushState = remoteFolder.getNewPushState(localFolder.getPushState(), message);
if (newPushState != null)
{
localFolder.setPushState(newPushState);
}
if (message.isSet(Flag.DELETED) || message.olderThan(earliestDate))
{
if (K9.DEBUG)
{
if (message.isSet(Flag.DELETED))
{
Log.v(K9.LOG_TAG, "Newly downloaded message " + account + ":" + folder + ":" + message.getUid()
+ " was marked deleted on server, skipping");
}
else
{
Log.d(K9.LOG_TAG, "Newly downloaded message " + message.getUid() + " is older than "
+ earliestDate + ", skipping");
}
}
progress.incrementAndGet();
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxProgress(account, folder, progress.get(), todo);
}
return;
}
if (message.getSize() > account.getMaximumAutoDownloadMessageSize())
{
largeMessages.add(message);
}
else
{
smallMessages.add(message);
}
// And include it in the view
if (message.getSubject() != null &&
message.getFrom() != null)
{
/*
* We check to make sure that we got something worth
* showing (subject and from) because some protocols
* (POP) may not be able to give us headers for
* ENVELOPE, only size.
*/
if (!isMessageSuppressed(account, folder, message))
{
// Store the new message locally
localFolder.appendMessages(new Message[]
{
message
});
Message localMessage = localFolder.getMessage(message.getUid());
syncFlags(localMessage, message);
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "About to notify listeners that we got a new unsynced message "
+ account + ":" + folder + ":" + message.getUid());
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxAddOrUpdateMessage(account, folder, localMessage);
}
}
}
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Error while storing downloaded message.", e);
addErrorMessage(account, null, e);
}
}
public void messageStarted(String uid, int number, int ofTotal)
{
}
public void messagesFinished(int total) {}
});
}
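/*
* Decide whether a freshly downloaded message should be stored locally: suppressed
* messages and messages older than the account's earliest poll date are skipped.
*/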
private boolean shouldImportMessage(final Account account, final String folder, final Message message, final AtomicInteger progress, final Date earliestDate)
{
if (isMessageSuppressed(account, folder, message))
{
if (K9.DEBUG)
{
Log.d(K9.LOG_TAG, "Message " + message.getUid() + " was suppressed "+
"but just downloaded. "+
"The race condition means we wasted some bandwidth. Oh well.");
}
return false;
}
if (message.olderThan(earliestDate))
{
if (K9.DEBUG)
{
Log.d(K9.LOG_TAG, "Message " + message.getUid() + " is older than "
+ earliestDate + ", hence not saving");
}
return false;
}
return true;
}
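/*
* Download the bodies of the small messages in a single batch fetch, store them
* locally, mark them as fully downloaded and notify listeners as each one arrives.
*/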
private void downloadSmallMessages(final Account account, final Folder remoteFolder,
final LocalFolder localFolder,
ArrayList<Message> smallMessages,
final AtomicInteger progress,
final int unreadBeforeStart,
final AtomicInteger newMessages,
final int todo,
FetchProfile fp) throws MessagingException
{
final String folder = remoteFolder.getName();
final Date earliestDate = account.getEarliestPollDate();
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "SYNC: Fetching small messages for folder " + folder);
remoteFolder.fetch(smallMessages.toArray(new Message[smallMessages.size()]),
fp, new MessageRetrievalListener()
{
public void messageFinished(Message message, int number, int ofTotal)
{
try
{
if (!shouldImportMessage(account, folder, message, progress, earliestDate))
{
progress.incrementAndGet();
return;
}
// Store the updated message locally
localFolder.appendMessages(new Message[] { message });
Message localMessage = localFolder.getMessage(message.getUid());
progress.incrementAndGet();
// Set a flag indicating this message has now been fully downloaded
localMessage.setFlag(Flag.X_DOWNLOADED_FULL, true);
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "About to notify listeners that we got a new small message "
+ account + ":" + folder + ":" + message.getUid());
// Update the listener with what we've found
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxAddOrUpdateMessage(account, folder, localMessage);
l.synchronizeMailboxProgress(account, folder, progress.get(), todo);
if (!localMessage.isSet(Flag.SEEN))
{
l.synchronizeMailboxNewMessage(account, folder, localMessage);
}
}
// Send a notification of this message
if (shouldNotifyForMessage(account, message))
{
newMessages.incrementAndGet();
notifyAccount(mApplication, account, message, unreadBeforeStart, newMessages);
}
}
catch (MessagingException me)
{
addErrorMessage(account, null, me);
Log.e(K9.LOG_TAG, "SYNC: fetch small messages", me);
}
}
public void messageStarted(String uid, int number, int ofTotal)
{
}
public void messagesFinished(int total) {}
});
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "SYNC: Done fetching small messages for folder " + folder);
}
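/*
* Download the large messages: if the store could not provide a body structure,
* fetch a size-limited portion and mark the message as fully or partially
* downloaded depending on its size; otherwise fetch only the viewable (text) parts
* and leave attachments for later.
*/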
private void downloadLargeMessages(final Account account, final Folder remoteFolder,
final LocalFolder localFolder,
ArrayList<Message> largeMessages,
final AtomicInteger progress,
final int unreadBeforeStart,
final AtomicInteger newMessages,
final int todo,
FetchProfile fp) throws MessagingException
{
final String folder = remoteFolder.getName();
final Date earliestDate = account.getEarliestPollDate();
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "SYNC: Fetching large messages for folder " + folder);
remoteFolder.fetch(largeMessages.toArray(new Message[largeMessages.size()]), fp, null);
for (Message message : largeMessages)
{
if (!shouldImportMessage(account, folder, message, progress, earliestDate))
{
progress.incrementAndGet();
continue;
}
if (message.getBody() == null)
{
/*
* The provider was unable to get the structure of the message, so
* we'll download a reasonable portion of the message and mark it as
* incomplete so the entire thing can be downloaded later if the user
* wishes to download it.
*/
fp.clear();
fp.add(FetchProfile.Item.BODY_SANE);
/*
* TODO a good optimization here would be to make sure that all Stores set
* the proper size after this fetch and compare the before and after size. If
* they equal we can mark this SYNCHRONIZED instead of PARTIALLY_SYNCHRONIZED
*/
remoteFolder.fetch(new Message[] { message }, fp, null);
// Store the updated message locally
localFolder.appendMessages(new Message[] { message });
Message localMessage = localFolder.getMessage(message.getUid());
// Certain (POP3) servers give you the whole message even when you ask for only the first x Kb
if (!message.isSet(Flag.X_DOWNLOADED_FULL))
{
/*
* Mark the message as fully downloaded if the message size is smaller than
* the account's autodownload size limit, otherwise mark as only a partial
* download. This will prevent the system from downloading the same message
* twice.
*/
if (message.getSize() < account.getMaximumAutoDownloadMessageSize())
{
localMessage.setFlag(Flag.X_DOWNLOADED_FULL, true);
}
else
{
// Set a flag indicating that the message has been partially downloaded and
// is ready for view.
localMessage.setFlag(Flag.X_DOWNLOADED_PARTIAL, true);
}
}
}
else
{
/*
* We have a structure to deal with, from which
* we can pull down the parts we want to actually store.
* Build a list of parts we are interested in. Text parts will be downloaded
* right now, attachments will be left for later.
*/
ArrayList<Part> viewables = new ArrayList<Part>();
ArrayList<Part> attachments = new ArrayList<Part>();
MimeUtility.collectParts(message, viewables, attachments);
/*
* Now download the parts we're interested in storing.
*/
for (Part part : viewables)
{
remoteFolder.fetchPart(message, part, null);
}
// Store the updated message locally
localFolder.appendMessages(new Message[] { message });
Message localMessage = localFolder.getMessage(message.getUid());
// Set a flag indicating the viewable parts of this message have been downloaded;
// attachments are fetched later, so the message is only marked as partially
// downloaded but can already be viewed.
localMessage.setFlag(Flag.X_DOWNLOADED_PARTIAL, true);
}
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "About to notify listeners that we got a new large message "
+ account + ":" + folder + ":" + message.getUid());
// Update the listener with what we've found
progress.incrementAndGet();
Message localMessage = localFolder.getMessage(message.getUid());
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxAddOrUpdateMessage(account, folder, localMessage);
l.synchronizeMailboxProgress(account, folder, progress.get(), todo);
if (!localMessage.isSet(Flag.SEEN))
{
l.synchronizeMailboxNewMessage(account, folder, localMessage);
}
}
// Send a notification of this message
if (shouldNotifyForMessage(account, message))
{
newMessages.incrementAndGet();
notifyAccount(mApplication, account, message, unreadBeforeStart, newMessages);
}
}//for large messages
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "SYNC: Done fetching large messages for folder " + folder);
}
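/*
* Re-fetch the flags of the remaining remote messages (when the store supports it)
* and propagate any changes to the matching local messages, notifying listeners of
* removals and updates.
*/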
private void refreshLocalMessageFlags(final Account account, final Folder remoteFolder,
final LocalFolder localFolder,
ArrayList<Message> syncFlagMessages,
final AtomicInteger progress,
final int todo
) throws MessagingException
{
final String folder = remoteFolder.getName();
if (remoteFolder.supportsFetchingFlags())
{
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "SYNC: About to sync flags for "
+ syncFlagMessages.size() + " remote messages for folder " + folder);
FetchProfile fp = new FetchProfile();
fp.add(FetchProfile.Item.FLAGS);
List<Message> undeletedMessages = new LinkedList<Message>();
for (Message message : syncFlagMessages)
{
if (!message.isSet(Flag.DELETED))
{
undeletedMessages.add(message);
}
}
remoteFolder.fetch(undeletedMessages.toArray(EMPTY_MESSAGE_ARRAY), fp, null);
for (Message remoteMessage : syncFlagMessages)
{
Message localMessage = localFolder.getMessage(remoteMessage.getUid());
boolean messageChanged = syncFlags(localMessage, remoteMessage);
if (messageChanged)
{
if (localMessage.isSet(Flag.DELETED) || isMessageSuppressed(account, folder, localMessage))
{
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxRemovedMessage(account, folder, localMessage);
}
}
else
{
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxAddOrUpdateMessage(account, folder, localMessage);
}
}
}
progress.incrementAndGet();
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxProgress(account, folder, progress.get(), todo);
}
}
}
}
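/*
* Copy the SEEN, FLAGGED and ANSWERED flags (and, if the account syncs remote
* deletions, the DELETED flag) from the remote message to the local one. Returns
* true if any local flag actually changed.
*/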
private boolean syncFlags(Message localMessage, Message remoteMessage) throws MessagingException
{
boolean messageChanged = false;
if (localMessage == null || localMessage.isSet(Flag.DELETED))
{
return false;
}
if (remoteMessage.isSet(Flag.DELETED))
{
if (localMessage.getFolder().getAccount().syncRemoteDeletions())
{
localMessage.setFlag(Flag.DELETED, true);
messageChanged = true;
}
}
else
{
for (Flag flag : new Flag[] { Flag.SEEN, Flag.FLAGGED, Flag.ANSWERED })
{
if (remoteMessage.isSet(flag) != localMessage.isSet(flag))
{
localMessage.setFlag(flag, remoteMessage.isSet(flag));
messageChanged = true;
}
}
}
return messageChanged;
}
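/*
* Walk the cause chain of a throwable and return the message of its root cause.
*/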
private String getRootCauseMessage(Throwable t)
{
Throwable rootCause = t;
Throwable nextCause = rootCause;
do
{
nextCause = rootCause.getCause();
if (nextCause != null)
{
rootCause = nextCause;
}
}
while (nextCause != null);
return rootCause.getMessage();
}
private void queuePendingCommand(Account account, PendingCommand command)
{
try
{
LocalStore localStore = account.getLocalStore();
localStore.addPendingCommand(command);
}
catch (Exception e)
{
addErrorMessage(account, null, e);
throw new RuntimeException("Unable to enqueue pending command", e);
}
}
private void processPendingCommands(final Account account)
{
putBackground("processPendingCommands", null, new Runnable()
{
public void run()
{
try
{
processPendingCommandsSynchronous(account);
}
catch (UnavailableStorageException e)
{
Log.i(K9.LOG_TAG, "Failed to process pending command because storage is not available - trying again later.");
throw new UnavailableAccountException(e);
}
catch (MessagingException me)
{
Log.e(K9.LOG_TAG, "processPendingCommands", me);
addErrorMessage(account, null, me);
/*
* Ignore any exceptions from the commands. Commands will be processed
* on the next round.
*/
}
}
});
}
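/*
* Replay all queued pending commands against the remote store in order. A command
* that fails permanently is dropped from the queue; a transient failure aborts
* processing so the remaining commands keep their order for the next attempt.
*/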
private void processPendingCommandsSynchronous(Account account) throws MessagingException
{
LocalStore localStore = account.getLocalStore();
ArrayList<PendingCommand> commands = localStore.getPendingCommands();
int progress = 0;
int todo = commands.size();
if (todo == 0)
{
return;
}
for (MessagingListener l : getListeners())
{
l.pendingCommandsProcessing(account);
l.synchronizeMailboxProgress(account, null, progress, todo);
}
PendingCommand processingCommand = null;
try
{
for (PendingCommand command : commands)
{
processingCommand = command;
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "Processing pending command '" + command + "'");
String[] components = command.command.split("\\.");
String commandTitle = components[components.length - 1];
for (MessagingListener l : getListeners())
{
l.pendingCommandStarted(account, commandTitle);
}
/*
* We specifically do not catch any exceptions here. If a command fails it is
* most likely due to a server or IO error and it must be retried before any
* other command processes. This maintains the order of the commands.
*/
try
{
if (PENDING_COMMAND_APPEND.equals(command.command))
{
processPendingAppend(command, account);
}
else if (PENDING_COMMAND_SET_FLAG_BULK.equals(command.command))
{
processPendingSetFlag(command, account);
}
else if (PENDING_COMMAND_SET_FLAG.equals(command.command))
{
processPendingSetFlagOld(command, account);
}
else if (PENDING_COMMAND_MARK_ALL_AS_READ.equals(command.command))
{
processPendingMarkAllAsRead(command, account);
}
else if (PENDING_COMMAND_MOVE_OR_COPY_BULK.equals(command.command))
{
processPendingMoveOrCopy(command, account);
}
else if (PENDING_COMMAND_MOVE_OR_COPY.equals(command.command))
{
processPendingMoveOrCopyOld(command, account);
}
else if (PENDING_COMMAND_EMPTY_TRASH.equals(command.command))
{
processPendingEmptyTrash(command, account);
}
else if (PENDING_COMMAND_EXPUNGE.equals(command.command))
{
processPendingExpunge(command, account);
}
localStore.removePendingCommand(command);
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "Done processing pending command '" + command + "'");
}
catch (MessagingException me)
{
if (me.isPermanentFailure())
{
addErrorMessage(account, null, me);
Log.e(K9.LOG_TAG, "Failure of command '" + command + "' was permanent, removing command from queue");
localStore.removePendingCommand(processingCommand);
}
else
{
throw me;
}
}
finally
{
progress++;
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxProgress(account, null, progress, todo);
l.pendingCommandCompleted(account, commandTitle);
}
}
}
}
catch (MessagingException me)
{
addErrorMessage(account, null, me);
Log.e(K9.LOG_TAG, "Could not process command '" + processingCommand + "'", me);
throw me;
}
finally
{
for (MessagingListener l : getListeners())
{
l.pendingCommandsFinished(account);
}
}
}
/**
* Process a pending append message command. This command uploads a local message to the
* server, first checking to be sure that the server message is not newer than
* the local message. Once the local message is successfully processed it is deleted so
* that the server message will be synchronized down without an additional copy being
* created.
* TODO update the local message UID instead of deleting it
*
* @param command arguments = (String folder, String uid)
* @param account
* @throws MessagingException
*/
private void processPendingAppend(PendingCommand command, Account account)
throws MessagingException
{
Folder remoteFolder = null;
LocalFolder localFolder = null;
try
{
String folder = command.arguments[0];
String uid = command.arguments[1];
if (account.getErrorFolderName().equals(folder))
{
return;
}
LocalStore localStore = account.getLocalStore();
localFolder = localStore.getFolder(folder);
LocalMessage localMessage = (LocalMessage) localFolder.getMessage(uid);
if (localMessage == null)
{
return;
}
Store remoteStore = account.getRemoteStore();
remoteFolder = remoteStore.getFolder(folder);
if (!remoteFolder.exists())
{
if (!remoteFolder.create(FolderType.HOLDS_MESSAGES))
{
return;
}
}
remoteFolder.open(OpenMode.READ_WRITE);
if (remoteFolder.getMode() != OpenMode.READ_WRITE)
{
return;
}
Message remoteMessage = null;
if (!localMessage.getUid().startsWith(K9.LOCAL_UID_PREFIX))
{
remoteMessage = remoteFolder.getMessage(localMessage.getUid());
}
if (remoteMessage == null)
{
if (localMessage.isSet(Flag.X_REMOTE_COPY_STARTED))
{
Log.w(K9.LOG_TAG, "Local message with uid " + localMessage.getUid() +
" has flag " + Flag.X_REMOTE_COPY_STARTED + " already set, checking for remote message with " +
" same message id");
String rUid = remoteFolder.getUidFromMessageId(localMessage);
if (rUid != null)
{
Log.w(K9.LOG_TAG, "Local message has flag " + Flag.X_REMOTE_COPY_STARTED + " already set, and there is a remote message with " +
" uid " + rUid + ", assuming message was already copied and aborting this copy");
String oldUid = localMessage.getUid();
localMessage.setUid(rUid);
localFolder.changeUid(localMessage);
for (MessagingListener l : getListeners())
{
l.messageUidChanged(account, folder, oldUid, localMessage.getUid());
}
return;
}
else
{
Log.w(K9.LOG_TAG, "No remote message with message-id found, proceeding with append");
}
}
/*
* If the message does not exist remotely we just upload it and then
* update our local copy with the new uid.
*/
FetchProfile fp = new FetchProfile();
fp.add(FetchProfile.Item.BODY);
localFolder.fetch(new Message[]
{
localMessage
}
, fp, null);
String oldUid = localMessage.getUid();
localMessage.setFlag(Flag.X_REMOTE_COPY_STARTED, true);
remoteFolder.appendMessages(new Message[] { localMessage });
localFolder.changeUid(localMessage);
for (MessagingListener l : getListeners())
{
l.messageUidChanged(account, folder, oldUid, localMessage.getUid());
}
}
else
{
/*
* If the remote message exists we need to determine which copy to keep.
*/
/*
* See if the remote message is newer than ours.
*/
FetchProfile fp = new FetchProfile();
fp.add(FetchProfile.Item.ENVELOPE);
remoteFolder.fetch(new Message[] { remoteMessage }, fp, null);
Date localDate = localMessage.getInternalDate();
Date remoteDate = remoteMessage.getInternalDate();
if (remoteDate != null && remoteDate.compareTo(localDate) > 0)
{
/*
* If the remote message is newer than ours we'll just
* delete ours and move on. A sync will get the server message
* if we need to be able to see it.
*/
localMessage.destroy();
}
else
{
/*
* Otherwise we'll upload our message and then delete the remote message.
*/
fp = new FetchProfile();
fp.add(FetchProfile.Item.BODY);
localFolder.fetch(new Message[] { localMessage }, fp, null);
String oldUid = localMessage.getUid();
localMessage.setFlag(Flag.X_REMOTE_COPY_STARTED, true);
remoteFolder.appendMessages(new Message[] { localMessage });
localFolder.changeUid(localMessage);
for (MessagingListener l : getListeners())
{
l.messageUidChanged(account, folder, oldUid, localMessage.getUid());
}
if (remoteDate != null)
{
remoteMessage.setFlag(Flag.DELETED, true);
if (Account.EXPUNGE_IMMEDIATELY.equals(account.getExpungePolicy()))
{
remoteFolder.expunge();
}
}
}
}
}
finally
{
if (remoteFolder != null)
{
remoteFolder.close();
}
if (localFolder != null)
{
localFolder.close();
}
}
}
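/*
* Queue a bulk move/copy command. Arguments are laid out as
* (srcFolder, destFolder, isCopy, uid...).
*/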
private void queueMoveOrCopy(Account account, String srcFolder, String destFolder, boolean isCopy, String uids[])
{
if (account.getErrorFolderName().equals(srcFolder))
{
return;
}
PendingCommand command = new PendingCommand();
command.command = PENDING_COMMAND_MOVE_OR_COPY_BULK;
int length = 3 + uids.length;
command.arguments = new String[length];
command.arguments[0] = srcFolder;
command.arguments[1] = destFolder;
command.arguments[2] = Boolean.toString(isCopy);
for (int i = 0; i < uids.length; i++)
{
command.arguments[3 + i] = uids[i];
}
queuePendingCommand(account, command);
}
/**
* Process a pending bulk move or copy command.
*
* @param command arguments = (String srcFolder, String destFolder, String isCopy, String uid...)
* @param account
* @throws MessagingException
*/
private void processPendingMoveOrCopy(PendingCommand command, Account account)
throws MessagingException
{
Folder remoteSrcFolder = null;
Folder remoteDestFolder = null;
try
{
String srcFolder = command.arguments[0];
if (account.getErrorFolderName().equals(srcFolder))
{
return;
}
String destFolder = command.arguments[1];
String isCopyS = command.arguments[2];
Store remoteStore = account.getRemoteStore();
remoteSrcFolder = remoteStore.getFolder(srcFolder);
List<Message> messages = new ArrayList<Message>();
for (int i = 3; i < command.arguments.length; i++)
{
String uid = command.arguments[i];
if (!uid.startsWith(K9.LOCAL_UID_PREFIX))
{
messages.add(remoteSrcFolder.getMessage(uid));
}
}
boolean isCopy = false;
if (isCopyS != null)
{
isCopy = Boolean.parseBoolean(isCopyS);
}
if (!remoteSrcFolder.exists())
{
throw new MessagingException("processingPendingMoveOrCopy: remoteFolder " + srcFolder + " does not exist", true);
}
remoteSrcFolder.open(OpenMode.READ_WRITE);
if (remoteSrcFolder.getMode() != OpenMode.READ_WRITE)
{
throw new MessagingException("processingPendingMoveOrCopy: could not open remoteSrcFolder " + srcFolder + " read/write", true);
}
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "processingPendingMoveOrCopy: source folder = " + srcFolder
+ ", " + messages.size() + " messages, destination folder = " + destFolder + ", isCopy = " + isCopy);
if (!isCopy && destFolder.equals(account.getTrashFolderName()))
{
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "processingPendingMoveOrCopy doing special case for deleting message");
String destFolderName = destFolder;
if (K9.FOLDER_NONE.equals(destFolderName))
{
destFolderName = null;
}
remoteSrcFolder.delete(messages.toArray(EMPTY_MESSAGE_ARRAY), destFolderName);
}
else
{
remoteDestFolder = remoteStore.getFolder(destFolder);
if (isCopy)
{
remoteSrcFolder.copyMessages(messages.toArray(EMPTY_MESSAGE_ARRAY), remoteDestFolder);
}
else
{
remoteSrcFolder.moveMessages(messages.toArray(EMPTY_MESSAGE_ARRAY), remoteDestFolder);
}
}
if (!isCopy && Account.EXPUNGE_IMMEDIATELY.equals(account.getExpungePolicy()))
{
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "processingPendingMoveOrCopy expunging folder " + account.getDescription() + ":" + srcFolder);
remoteSrcFolder.expunge();
}
}
finally
{
if (remoteSrcFolder != null)
{
remoteSrcFolder.close();
}
if (remoteDestFolder != null)
{
remoteDestFolder.close();
}
}
}
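/*
* Queue a bulk set-flag command in the background and kick off pending command
* processing. Arguments are laid out as (folderName, newState, flag, uid...).
*/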
private void queueSetFlag(final Account account, final String folderName, final String newState, final String flag, final String[] uids)
{
putBackground("queueSetFlag " + account.getDescription() + ":" + folderName, null, new Runnable()
{
public void run()
{
PendingCommand command = new PendingCommand();
command.command = PENDING_COMMAND_SET_FLAG_BULK;
int length = 3 + uids.length;
command.arguments = new String[length];
command.arguments[0] = folderName;
command.arguments[1] = newState;
command.arguments[2] = flag;
for (int i = 0; i < uids.length; i++)
{
command.arguments[3 + i] = uids[i];
}
queuePendingCommand(account, command);
processPendingCommands(account);
}
});
}
/**
* Processes a pending bulk set-flag command.
*
* @param command arguments = (String folder, String newState, String flag, String uid...)
* @param account
*/
private void processPendingSetFlag(PendingCommand command, Account account)
throws MessagingException
{
String folder = command.arguments[0];
if (account.getErrorFolderName().equals(folder))
{
return;
}
boolean newState = Boolean.parseBoolean(command.arguments[1]);
Flag flag = Flag.valueOf(command.arguments[2]);
Store remoteStore = account.getRemoteStore();
Folder remoteFolder = remoteStore.getFolder(folder);
if (!remoteFolder.exists() ||
/*
* Don't proceed if the remote folder doesn't support flags and
* the flag to be changed isn't the deleted flag. This avoids
* unnecessary connections to POP3 servers.
*/
// TODO: This should actually call a supportsSettingFlag(flag) method.
(!remoteFolder.supportsFetchingFlags() && !Flag.DELETED.equals(flag)))
{
return;
}
try
{
remoteFolder.open(OpenMode.READ_WRITE);
if (remoteFolder.getMode() != OpenMode.READ_WRITE)
{
return;
}
List<Message> messages = new ArrayList<Message>();
for (int i = 3; i < command.arguments.length; i++)
{
String uid = command.arguments[i];
if (!uid.startsWith(K9.LOCAL_UID_PREFIX))
{
messages.add(remoteFolder.getMessage(uid));
}
}
if (messages.size() == 0)
{
return;
}
remoteFolder.setFlags(messages.toArray(EMPTY_MESSAGE_ARRAY), new Flag[] { flag }, newState);
}
finally
{
if (remoteFolder != null)
{
remoteFolder.close();
}
}
}
// TODO: This method is obsolete and is only for transition from K-9 2.0 to K-9 2.1
// Eventually, it should be removed
private void processPendingSetFlagOld(PendingCommand command, Account account)
throws MessagingException
{
String folder = command.arguments[0];
String uid = command.arguments[1];
if (account.getErrorFolderName().equals(folder))
{
return;
}
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "processPendingSetFlagOld: folder = " + folder + ", uid = " + uid);
boolean newState = Boolean.parseBoolean(command.arguments[2]);
Flag flag = Flag.valueOf(command.arguments[3]);
Folder remoteFolder = null;
try
{
Store remoteStore = account.getRemoteStore();
remoteFolder = remoteStore.getFolder(folder);
if (!remoteFolder.exists())
{
return;
}
remoteFolder.open(OpenMode.READ_WRITE);
if (remoteFolder.getMode() != OpenMode.READ_WRITE)
{
return;
}
Message remoteMessage = null;
if (!uid.startsWith(K9.LOCAL_UID_PREFIX))
{
remoteMessage = remoteFolder.getMessage(uid);
}
if (remoteMessage == null)
{
return;
}
remoteMessage.setFlag(flag, newState);
}
finally
{
if (remoteFolder != null)
{
remoteFolder.close();
}
}
}
private void queueExpunge(final Account account, final String folderName)
{
putBackground("queueExpunge " + account.getDescription() + ":" + folderName, null, new Runnable()
{
public void run()
{
PendingCommand command = new PendingCommand();
command.command = PENDING_COMMAND_EXPUNGE;
command.arguments = new String[1];
command.arguments[0] = folderName;
queuePendingCommand(account, command);
processPendingCommands(account);
}
});
}
private void processPendingExpunge(PendingCommand command, Account account)
throws MessagingException
{
String folder = command.arguments[0];
if (account.getErrorFolderName().equals(folder))
{
return;
}
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "processPendingExpunge: folder = " + folder);
Store remoteStore = account.getRemoteStore();
Folder remoteFolder = remoteStore.getFolder(folder);
try
{
if (!remoteFolder.exists())
{
return;
}
remoteFolder.open(OpenMode.READ_WRITE);
if (remoteFolder.getMode() != OpenMode.READ_WRITE)
{
return;
}
remoteFolder.expunge();
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "processPendingExpunge: complete for folder = " + folder);
}
finally
{
if (remoteFolder != null)
{
remoteFolder.close();
}
}
}
// TODO: This method is obsolete and is only for transition from K-9 2.0 to K-9 2.1
// Eventually, it should be removed
private void processPendingMoveOrCopyOld(PendingCommand command, Account account)
throws MessagingException
{
String srcFolder = command.arguments[0];
String uid = command.arguments[1];
String destFolder = command.arguments[2];
String isCopyS = command.arguments[3];
boolean isCopy = false;
if (isCopyS != null)
{
isCopy = Boolean.parseBoolean(isCopyS);
}
if (account.getErrorFolderName().equals(srcFolder))
{
return;
}
Store remoteStore = account.getRemoteStore();
Folder remoteSrcFolder = remoteStore.getFolder(srcFolder);
Folder remoteDestFolder = remoteStore.getFolder(destFolder);
if (!remoteSrcFolder.exists())
{
throw new MessagingException("processPendingMoveOrCopyOld: remoteFolder " + srcFolder + " does not exist", true);
}
remoteSrcFolder.open(OpenMode.READ_WRITE);
if (remoteSrcFolder.getMode() != OpenMode.READ_WRITE)
{
throw new MessagingException("processPendingMoveOrCopyOld: could not open remoteSrcFolder " + srcFolder + " read/write", true);
}
Message remoteMessage = null;
if (!uid.startsWith(K9.LOCAL_UID_PREFIX))
{
remoteMessage = remoteSrcFolder.getMessage(uid);
}
if (remoteMessage == null)
{
throw new MessagingException("processPendingMoveOrCopyOld: remoteMessage " + uid + " does not exist", true);
}
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "processPendingMoveOrCopyOld: source folder = " + srcFolder
+ ", uid = " + uid + ", destination folder = " + destFolder + ", isCopy = " + isCopy);
if (!isCopy && destFolder.equals(account.getTrashFolderName()))
{
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "processPendingMoveOrCopyOld doing special case for deleting message");
remoteMessage.delete(account.getTrashFolderName());
remoteSrcFolder.close();
return;
}
remoteDestFolder.open(OpenMode.READ_WRITE);
if (remoteDestFolder.getMode() != OpenMode.READ_WRITE)
{
throw new MessagingException("processPendingMoveOrCopyOld: could not open remoteDestFolder " + srcFolder + " read/write", true);
}
if (isCopy)
{
remoteSrcFolder.copyMessages(new Message[] { remoteMessage }, remoteDestFolder);
}
else
{
remoteSrcFolder.moveMessages(new Message[] { remoteMessage }, remoteDestFolder);
}
remoteSrcFolder.close();
remoteDestFolder.close();
}
private void processPendingMarkAllAsRead(PendingCommand command, Account account) throws MessagingException
{
String folder = command.arguments[0];
Folder remoteFolder = null;
LocalFolder localFolder = null;
try
{
Store localStore = account.getLocalStore();
localFolder = (LocalFolder) localStore.getFolder(folder);
localFolder.open(OpenMode.READ_WRITE);
Message[] messages = localFolder.getMessages(null, false);
for (Message message : messages)
{
if (!message.isSet(Flag.SEEN))
{
message.setFlag(Flag.SEEN, true);
for (MessagingListener l : getListeners())
{
l.listLocalMessagesUpdateMessage(account, folder, message);
}
}
}
localFolder.setUnreadMessageCount(0);
for (MessagingListener l : getListeners())
{
l.folderStatusChanged(account, folder, 0);
}
if (account.getErrorFolderName().equals(folder))
{
return;
}
Store remoteStore = account.getRemoteStore();
remoteFolder = remoteStore.getFolder(folder);
if (!remoteFolder.exists())
{
return;
}
remoteFolder.open(OpenMode.READ_WRITE);
if (remoteFolder.getMode() != OpenMode.READ_WRITE)
{
return;
}
remoteFolder.setFlags(new Flag[] {Flag.SEEN}, true);
remoteFolder.close();
}
catch (UnsupportedOperationException uoe)
{
Log.w(K9.LOG_TAG, "Could not mark all server-side as read because store doesn't support operation", uoe);
}
finally
{
if (localFolder != null)
{
localFolder.close();
}
if (remoteFolder != null)
{
remoteFolder.close();
}
}
}
static long uidfill = 0;
static AtomicBoolean loopCatch = new AtomicBoolean();
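/*
* Record an exception as a message in the account's error folder. The loopCatch
* guard prevents recursion if writing the error message itself fails.
*/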
public void addErrorMessage(Account account, String subject, Throwable t)
{
if (!loopCatch.compareAndSet(false, true))
{
return;
}
try
{
if (t == null)
{
return;
}
CharArrayWriter baos = new CharArrayWriter(t.getStackTrace().length * 10);
PrintWriter ps = new PrintWriter(baos);
t.printStackTrace(ps);
ps.close();
if (subject == null)
{
subject = getRootCauseMessage(t);
}
addErrorMessage(account, subject, baos.toString());
}
catch (Throwable it)
{
Log.e(K9.LOG_TAG, "Could not save error message to " + account.getErrorFolderName(), it);
}
finally
{
loopCatch.set(false);
}
}
public void addErrorMessage(Account account, String subject, String body)
{
if (!K9.ENABLE_ERROR_FOLDER)
{
return;
}
if (!loopCatch.compareAndSet(false, true))
{
return;
}
try
{
if (body == null || body.length() < 1)
{
return;
}
Store localStore = account.getLocalStore();
LocalFolder localFolder = (LocalFolder)localStore.getFolder(account.getErrorFolderName());
Message[] messages = new Message[1];
MimeMessage message = new MimeMessage();
message.setBody(new TextBody(body));
message.setFlag(Flag.X_DOWNLOADED_FULL, true);
message.setSubject(subject);
long nowTime = System.currentTimeMillis();
Date nowDate = new Date(nowTime);
message.setInternalDate(nowDate);
message.addSentDate(nowDate);
message.setFrom(new Address(account.getEmail(), "K9mail internal"));
messages[0] = message;
localFolder.appendMessages(messages);
localFolder.clearMessagesOlderThan(nowTime - (15 * 60 * 1000));
}
catch (Throwable it)
{
Log.e(K9.LOG_TAG, "Could not save error message to " + account.getErrorFolderName(), it);
}
finally
{
loopCatch.set(false);
}
}
public void markAllMessagesRead(final Account account, final String folder)
{
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Marking all messages in " + account.getDescription() + ":" + folder + " as read");
List<String> args = new ArrayList<String>();
args.add(folder);
PendingCommand command = new PendingCommand();
command.command = PENDING_COMMAND_MARK_ALL_AS_READ;
command.arguments = args.toArray(EMPTY_STRING_ARRAY);
queuePendingCommand(account, command);
processPendingCommands(account);
}
public void setFlag(
final Message[] messages,
final Flag flag,
final boolean newState)
{
actOnMessages(messages, new MessageActor()
{
@Override
public void act(final Account account, final Folder folder,
final List<Message> messages)
{
String[] uids = new String[messages.size()];
for (int i = 0; i < messages.size(); i++)
{
uids[i] = messages.get(i).getUid();
}
setFlag(account, folder.getName(), uids, flag, newState);
}
});
}
public void setFlag(
final Account account,
final String folderName,
final String[] uids,
final Flag flag,
final boolean newState)
{
// TODO: put this into the background, but right now that causes odd behavior
// because the FolderMessageList doesn't have its own cache of the flag states
Folder localFolder = null;
try
{
Store localStore = account.getLocalStore();
localFolder = localStore.getFolder(folderName);
localFolder.open(OpenMode.READ_WRITE);
ArrayList<Message> messages = new ArrayList<Message>();
for (String uid : uids)
{
// Allow re-sending of messages that previously could not be sent
if (flag == Flag.FLAGGED && !newState
&& uid != null
&& account.getOutboxFolderName().equals(folderName))
{
sendCount.remove(uid);
}
Message msg = localFolder.getMessage(uid);
if (msg != null)
{
messages.add(msg);
}
}
localFolder.setFlags(messages.toArray(EMPTY_MESSAGE_ARRAY), new Flag[] {flag}, newState);
for (MessagingListener l : getListeners())
{
l.folderStatusChanged(account, folderName, localFolder.getUnreadMessageCount());
}
if (account.getErrorFolderName().equals(folderName))
{
return;
}
queueSetFlag(account, folderName, Boolean.toString(newState), flag.toString(), uids);
processPendingCommands(account);
}
catch (MessagingException me)
{
addErrorMessage(account, null, me);
throw new RuntimeException(me);
}
finally
{
if (localFolder != null)
{
localFolder.close();
}
}
}//setFlag
public void clearAllPending(final Account account)
{
try
{
Log.w(K9.LOG_TAG, "Clearing pending commands!");
LocalStore localStore = account.getLocalStore();
localStore.removePendingCommands();
}
catch (MessagingException me)
{
Log.e(K9.LOG_TAG, "Unable to clear pending command", me);
addErrorMessage(account, null, me);
}
}
public void loadMessageForViewRemote(final Account account, final String folder,
final String uid, final MessagingListener listener)
{
put("loadMessageForViewRemote", listener, new Runnable()
{
public void run()
{
Folder remoteFolder = null;
LocalFolder localFolder = null;
try
{
LocalStore localStore = account.getLocalStore();
localFolder = localStore.getFolder(folder);
localFolder.open(OpenMode.READ_WRITE);
Message message = localFolder.getMessage(uid);
if (message.isSet(Flag.X_DOWNLOADED_FULL))
{
/*
* If the message has been synchronized since we were called we'll
* just hand it back because it's ready to go.
*/
FetchProfile fp = new FetchProfile();
fp.add(FetchProfile.Item.ENVELOPE);
fp.add(FetchProfile.Item.BODY);
localFolder.fetch(new Message[] { message }, fp, null);
}
else
{
/*
* At this point the message is not available, so we need to download it
* fully if possible.
*/
Store remoteStore = account.getRemoteStore();
remoteFolder = remoteStore.getFolder(folder);
remoteFolder.open(OpenMode.READ_WRITE);
// Get the remote message and fully download it
Message remoteMessage = remoteFolder.getMessage(uid);
FetchProfile fp = new FetchProfile();
fp.add(FetchProfile.Item.BODY);
remoteFolder.fetch(new Message[] { remoteMessage }, fp, null);
// Store the message locally and load the stored message into memory
localFolder.appendMessages(new Message[] { remoteMessage });
fp.add(FetchProfile.Item.ENVELOPE);
message = localFolder.getMessage(uid);
localFolder.fetch(new Message[] { message }, fp, null);
// Mark that this message is now fully synched
message.setFlag(Flag.X_DOWNLOADED_FULL, true);
}
// now that we have the full message, refresh the headers
for (MessagingListener l : getListeners(listener))
{
l.loadMessageForViewHeadersAvailable(account, folder, uid, message);
}
for (MessagingListener l : getListeners(listener))
{
l.loadMessageForViewBodyAvailable(account, folder, uid, message);
}
for (MessagingListener l : getListeners(listener))
{
l.loadMessageForViewFinished(account, folder, uid, message);
}
}
catch (Exception e)
{
for (MessagingListener l : getListeners(listener))
{
l.loadMessageForViewFailed(account, folder, uid, e);
}
addErrorMessage(account, null, e);
}
finally
{
if (remoteFolder!=null)
{
remoteFolder.close();
}
if (localFolder!=null)
{
localFolder.close();
}
}//finally
}//run
});
}
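/*
* Load an already-downloaded message from the local store for display, marking it
* as seen and notifying the listener as headers and body become available.
*/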
public void loadMessageForView(final Account account, final String folder, final String uid,
final MessagingListener listener)
{
for (MessagingListener l : getListeners(listener))
{
l.loadMessageForViewStarted(account, folder, uid);
}
threadPool.execute(new Runnable()
{
public void run()
{
try
{
LocalStore localStore = account.getLocalStore();
LocalFolder localFolder = localStore.getFolder(folder);
localFolder.open(OpenMode.READ_WRITE);
LocalMessage message = (LocalMessage)localFolder.getMessage(uid);
if (message==null
|| message.getId()==0)
{
throw new IllegalArgumentException("Message not found: folder=" + folder + ", uid=" + uid);
}
if (!message.isSet(Flag.SEEN))
{
message.setFlag(Flag.SEEN, true);
setFlag(new Message[] { message }, Flag.SEEN, true);
}
for (MessagingListener l : getListeners(listener))
{
l.loadMessageForViewHeadersAvailable(account, folder, uid, message);
}
FetchProfile fp = new FetchProfile();
fp.add(FetchProfile.Item.ENVELOPE);
fp.add(FetchProfile.Item.BODY);
localFolder.fetch(new Message[]
{
message
}, fp, null);
localFolder.close();
for (MessagingListener l : getListeners(listener))
{
l.loadMessageForViewBodyAvailable(account, folder, uid, message);
}
for (MessagingListener l : getListeners(listener))
{
l.loadMessageForViewFinished(account, folder, uid, message);
}
}
catch (Exception e)
{
for (MessagingListener l : getListeners(listener))
{
l.loadMessageForViewFailed(account, folder, uid, e);
}
addErrorMessage(account, null, e);
}
}
});
}
/**
* Attempts to load the attachment specified by part from the given account and message.
* @param account
* @param message
* @param part
* @param listener
*/
public void loadAttachment(
final Account account,
final Message message,
final Part part,
final Object tag,
final MessagingListener listener)
{
/*
* Check if the attachment has already been downloaded. If it has there's no reason to
* download it, so we just tell the listener that it's ready to go.
*/
try
{
if (part.getBody() != null)
{
for (MessagingListener l : getListeners())
{
l.loadAttachmentStarted(account, message, part, tag, false);
}
if (listener != null)
{
listener.loadAttachmentStarted(account, message, part, tag, false);
}
for (MessagingListener l : getListeners())
{
l.loadAttachmentFinished(account, message, part, tag);
}
if (listener != null)
{
listener.loadAttachmentFinished(account, message, part, tag);
}
return;
}
}
catch (MessagingException me)
{
/*
* If the header isn't there the attachment isn't downloaded yet, so just continue
* on.
*/
}
for (MessagingListener l : getListeners())
{
l.loadAttachmentStarted(account, message, part, tag, true);
}
if (listener != null)
{
listener.loadAttachmentStarted(account, message, part, tag, true);
}
put("loadAttachment", listener, new Runnable()
{
public void run()
{
Folder remoteFolder = null;
LocalFolder localFolder = null;
try
{
LocalStore localStore = account.getLocalStore();
/*
* We clear out any attachments already cached in the entire store and then
* we update the passed in message to reflect that there are no cached
* attachments. This is in support of limiting the account to having one
* attachment downloaded at a time.
*/
localStore.pruneCachedAttachments();
ArrayList<Part> viewables = new ArrayList<Part>();
ArrayList<Part> attachments = new ArrayList<Part>();
MimeUtility.collectParts(message, viewables, attachments);
for (Part attachment : attachments)
{
attachment.setBody(null);
}
Store remoteStore = account.getRemoteStore();
localFolder = localStore.getFolder(message.getFolder().getName());
remoteFolder = remoteStore.getFolder(message.getFolder().getName());
remoteFolder.open(OpenMode.READ_WRITE);
//FIXME: This is an ugly hack that won't be needed once the Message objects have been united.
Message remoteMessage = remoteFolder.getMessage(message.getUid());
remoteMessage.setBody(message.getBody());
remoteFolder.fetchPart(remoteMessage, part, null);
localFolder.updateMessage((LocalMessage)message);
for (MessagingListener l : getListeners())
{
l.loadAttachmentFinished(account, message, part, tag);
}
if (listener != null)
{
listener.loadAttachmentFinished(account, message, part, tag);
}
}
catch (MessagingException me)
{
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Exception loading attachment", me);
for (MessagingListener l : getListeners())
{
l.loadAttachmentFailed(account, message, part, tag, me.getMessage());
}
if (listener != null)
{
listener.loadAttachmentFailed(account, message, part, tag, me.getMessage());
}
addErrorMessage(account, null, me);
}
finally
{
if (remoteFolder != null)
{
remoteFolder.close();
}
if (localFolder != null)
{
localFolder.close();
}
}
}
});
}
/**
* Stores the given message in the Outbox and starts a sendPendingMessages command to
* attempt to send the message.
* @param account
* @param message
* @param listener
*/
public void sendMessage(final Account account,
final Message message,
MessagingListener listener)
{
try
{
LocalStore localStore = account.getLocalStore();
LocalFolder localFolder = localStore.getFolder(account.getOutboxFolderName());
localFolder.open(OpenMode.READ_WRITE);
localFolder.appendMessages(new Message[]
{
message
});
Message localMessage = localFolder.getMessage(message.getUid());
localMessage.setFlag(Flag.X_DOWNLOADED_FULL, true);
localFolder.close();
sendPendingMessages(account, listener);
}
catch (Exception e)
{
/*
for (MessagingListener l : getListeners())
{
// TODO general failed
}
*/
addErrorMessage(account, null, e);
}
}
public void sendPendingMessages(MessagingListener listener)
{
final Preferences prefs = Preferences.getPreferences(mApplication.getApplicationContext());
Account[] accounts = prefs.getAccounts();
for (Account account : accounts)
{
sendPendingMessages(account, listener);
}
}
/**
* Attempt to send any messages that are sitting in the Outbox.
* @param account
* @param listener
*/
public void sendPendingMessages(final Account account,
MessagingListener listener)
{
putBackground("sendPendingMessages", listener, new Runnable()
{
public void run()
{
if (!account.isAvailable(mApplication))
{
throw new UnavailableAccountException();
}
if (messagesPendingSend(account))
{
NotificationManager notifMgr =
(NotificationManager)mApplication.getSystemService(Context.NOTIFICATION_SERVICE);
if (account.isShowOngoing())
{
Notification notif = new Notification(R.drawable.ic_menu_refresh,
mApplication.getString(R.string.notification_bg_send_ticker, account.getDescription()), System.currentTimeMillis());
Intent intent = MessageList.actionHandleFolderIntent(mApplication, account, K9.INBOX);
PendingIntent pi = PendingIntent.getActivity(mApplication, 0, intent, 0);
notif.setLatestEventInfo(mApplication, mApplication.getString(R.string.notification_bg_send_title),
account.getDescription() , pi);
notif.flags = Notification.FLAG_ONGOING_EVENT;
if (K9.NOTIFICATION_LED_WHILE_SYNCING)
{
notif.flags |= Notification.FLAG_SHOW_LIGHTS;
notif.ledARGB = account.getNotificationSetting().getLedColor();
notif.ledOnMS = K9.NOTIFICATION_LED_FAST_ON_TIME;
notif.ledOffMS = K9.NOTIFICATION_LED_FAST_OFF_TIME;
}
notifMgr.notify(K9.FETCHING_EMAIL_NOTIFICATION - account.getAccountNumber(), notif);
}
try
{
sendPendingMessagesSynchronous(account);
}
finally
{
if (account.isShowOngoing())
{
notifMgr.cancel(K9.FETCHING_EMAIL_NOTIFICATION - account.getAccountNumber());
}
}
}
}
});
}
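/*
* Returns true if the account's Outbox exists and contains at least one message.
*/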
public boolean messagesPendingSend(final Account account)
{
Folder localFolder = null;
try
{
localFolder = account.getLocalStore().getFolder(
account.getOutboxFolderName());
if (!localFolder.exists())
{
return false;
}
localFolder.open(OpenMode.READ_WRITE);
if (localFolder.getMessageCount() > 0)
{
return true;
}
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Exception while checking for unsent messages", e);
}
finally
{
if (localFolder != null)
{
localFolder.close();
}
}
return false;
}
/**
* Attempt to send any messages that are sitting in the Outbox.
* @param account
*/
public void sendPendingMessagesSynchronous(final Account account)
{
Folder localFolder = null;
try
{
Store localStore = account.getLocalStore();
localFolder = localStore.getFolder(
account.getOutboxFolderName());
if (!localFolder.exists())
{
return;
}
for (MessagingListener l : getListeners())
{
l.sendPendingMessagesStarted(account);
}
localFolder.open(OpenMode.READ_WRITE);
Message[] localMessages = localFolder.getMessages(null);
Exception lastFailure = null;
int progress = 0;
int todo = localMessages.length;
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxProgress(account, account.getSentFolderName(), progress, todo);
}
/*
* The profile we will use to pull all of the content
* for a given local message into memory for sending.
*/
FetchProfile fp = new FetchProfile();
fp.add(FetchProfile.Item.ENVELOPE);
fp.add(FetchProfile.Item.BODY);
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Scanning folder '" + account.getOutboxFolderName() + "' (" + ((LocalFolder)localFolder).getId() + ") for messages to send");
Transport transport = Transport.getInstance(account);
for (Message message : localMessages)
{
if (message.isSet(Flag.DELETED))
{
message.destroy();
continue;
}
try
{
AtomicInteger count = new AtomicInteger(0);
AtomicInteger oldCount = sendCount.putIfAbsent(message.getUid(), count);
if (oldCount != null)
{
count = oldCount;
}
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Send count for message " + message.getUid() + " is " + count.get());
localFolder.fetch(new Message[] { message }, fp, null);
try
{
message.setFlag(Flag.X_SEND_IN_PROGRESS, true);
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Sending message with UID " + message.getUid());
transport.sendMessage(message);
message.setFlag(Flag.X_SEND_IN_PROGRESS, false);
message.setFlag(Flag.SEEN, true);
progress++;
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxProgress(account, account.getSentFolderName(), progress, todo);
}
if (K9.FOLDER_NONE.equals(account.getSentFolderName()))
{
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Sent folder set to " + K9.FOLDER_NONE + ", deleting sent message");
message.setFlag(Flag.DELETED, true);
}
else
{
LocalFolder localSentFolder = (LocalFolder) localStore.getFolder( account.getSentFolderName());
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Moving sent message to folder '" + account.getSentFolderName() + "' (" + localSentFolder.getId() + ") ");
localFolder.moveMessages( new Message[] { message }, localSentFolder);
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Moved sent message to folder '" + account.getSentFolderName() + "' (" + localSentFolder.getId() + ") ");
PendingCommand command = new PendingCommand();
command.command = PENDING_COMMAND_APPEND;
command.arguments = new String[] { localSentFolder.getName(), message.getUid() };
queuePendingCommand(account, command);
processPendingCommands(account);
}
}
catch (Exception e)
{
message.setFlag(Flag.X_SEND_FAILED, true);
Log.e(K9.LOG_TAG, "Failed to send message", e);
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxFailed( account, localFolder.getName(), getRootCauseMessage(e));
}
lastFailure = e;
}
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Failed to fetch message for sending", e);
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxFailed( account, localFolder.getName(), getRootCauseMessage(e));
}
lastFailure = e;
}
}
if (localFolder.getMessageCount() == 0)
{
localFolder.delete(false);
}
for (MessagingListener l : getListeners())
{
l.sendPendingMessagesCompleted(account);
}
if (lastFailure != null)
{
NotificationManager notifMgr = (NotificationManager)mApplication.getSystemService(Context.NOTIFICATION_SERVICE);
Notification notif = new Notification(R.drawable.stat_notify_email_generic, mApplication.getString(R.string.send_failure_subject), System.currentTimeMillis());
Intent i = FolderList.actionHandleNotification(mApplication, account, account.getOutboxFolderName());
PendingIntent pi = PendingIntent.getActivity(mApplication, 0, i, 0);
notif.setLatestEventInfo(mApplication, mApplication.getString(R.string.send_failure_subject), lastFailure.getMessage(), pi);
notif.flags |= Notification.FLAG_SHOW_LIGHTS;
notif.flags |= Notification.FLAG_AUTO_CANCEL;
notif.ledARGB = K9.NOTIFICATION_LED_SENDING_FAILURE_COLOR;
notif.ledOnMS = K9.NOTIFICATION_LED_FAST_ON_TIME;
notif.ledOffMS = K9.NOTIFICATION_LED_FAST_OFF_TIME;
notifMgr.notify(-1500 - account.getAccountNumber(), notif);
}
}
catch (UnavailableStorageException e)
{
Log.i(K9.LOG_TAG, "Failed to send pending messages because storage is not available - trying again later.");
throw new UnavailableAccountException(e);
}
catch (Exception e)
{
for (MessagingListener l : getListeners())
{
l.sendPendingMessagesFailed(account);
}
addErrorMessage(account, null, e);
}
finally
{
if (localFolder != null)
{
try
{
localFolder.close();
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Exception while closing folder", e);
}
}
}
}
public void getAccountStats(final Context context, final Account account,
final MessagingListener l)
{
Runnable unreadRunnable = new Runnable()
{
public void run()
{
try
{
AccountStats stats = account.getStats(context);
l.accountStatusChanged(account, stats);
}
catch (MessagingException me)
{
Log.e(K9.LOG_TAG, "Count not get unread count for account " + account.getDescription(),
me);
}
}
};
put("getAccountStats:" + account.getDescription(), l, unreadRunnable);
}
public void getFolderUnreadMessageCount(final Account account, final String folderName,
final MessagingListener l)
{
Runnable unreadRunnable = new Runnable()
{
public void run()
{
int unreadMessageCount = 0;
try
{
Folder localFolder = account.getLocalStore().getFolder(folderName);
unreadMessageCount = localFolder.getUnreadMessageCount();
}
catch (MessagingException me)
{
Log.e(K9.LOG_TAG, "Count not get unread count for account " + account.getDescription(), me);
}
l.folderStatusChanged(account, folderName, unreadMessageCount);
}
};
put("getFolderUnread:" + account.getDescription() + ":" + folderName, l, unreadRunnable);
}
public boolean isMoveCapable(Message message)
{
return !message.getUid().startsWith(K9.LOCAL_UID_PREFIX);
}
public boolean isCopyCapable(Message message)
{
return isMoveCapable(message);
}
public boolean isMoveCapable(final Account account)
{
try
{
Store localStore = account.getLocalStore();
Store remoteStore = account.getRemoteStore();
return localStore.isMoveCapable() && remoteStore.isMoveCapable();
}
catch (MessagingException me)
{
Log.e(K9.LOG_TAG, "Exception while ascertaining move capability", me);
return false;
}
}
public boolean isCopyCapable(final Account account)
{
try
{
Store localStore = account.getLocalStore();
Store remoteStore = account.getRemoteStore();
return localStore.isCopyCapable() && remoteStore.isCopyCapable();
}
catch (MessagingException me)
{
Log.e(K9.LOG_TAG, "Exception while ascertaining copy capability", me);
return false;
}
}
public void moveMessages(final Account account, final String srcFolder, final Message[] messages, final String destFolder,
final MessagingListener listener)
{
for (Message message : messages)
{
suppressMessage(account, srcFolder, message);
}
putBackground("moveMessages", null, new Runnable()
{
public void run()
{
moveOrCopyMessageSynchronous(account, srcFolder, messages, destFolder, false, listener);
}
});
}
public void moveMessage(final Account account, final String srcFolder, final Message message, final String destFolder,
final MessagingListener listener)
{
moveMessages(account, srcFolder, new Message[] { message }, destFolder, listener);
}
public void copyMessages(final Account account, final String srcFolder, final Message[] messages, final String destFolder,
final MessagingListener listener)
{
putBackground("copyMessages", null, new Runnable()
{
public void run()
{
moveOrCopyMessageSynchronous(account, srcFolder, messages, destFolder, true, listener);
}
});
}
public void copyMessage(final Account account, final String srcFolder, final Message message, final String destFolder,
final MessagingListener listener)
{
copyMessages(account, srcFolder, new Message[] { message }, destFolder, listener);
}
private void moveOrCopyMessageSynchronous(final Account account, final String srcFolder, final Message[] inMessages,
final String destFolder, final boolean isCopy, MessagingListener listener)
{
try
{
Store localStore = account.getLocalStore();
Store remoteStore = account.getRemoteStore();
if (!isCopy && (!remoteStore.isMoveCapable() || !localStore.isMoveCapable()))
{
return;
}
if (isCopy && (!remoteStore.isCopyCapable() || !localStore.isCopyCapable()))
{
return;
}
Folder localSrcFolder = localStore.getFolder(srcFolder);
Folder localDestFolder = localStore.getFolder(destFolder);
List<String> uids = new LinkedList<String>();
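// Messages whose UID still carries the local prefix have not yet been given a
// server-side UID, so they cannot be moved or copied remotely and are left out
// of the batch.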
for (Message message : inMessages)
{
String uid = message.getUid();
if (!uid.startsWith(K9.LOCAL_UID_PREFIX))
{
uids.add(uid);
}
}
Message[] messages = localSrcFolder.getMessages(uids.toArray(EMPTY_STRING_ARRAY), null);
if (messages.length > 0)
{
Map<String, Message> origUidMap = new HashMap<String, Message>();
for (Message message : messages)
{
origUidMap.put(message.getUid(), message);
}
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "moveOrCopyMessageSynchronous: source folder = " + srcFolder
+ ", " + messages.length + " messages, " + ", destination folder = " + destFolder + ", isCopy = " + isCopy);
if (isCopy)
{
FetchProfile fp = new FetchProfile();
fp.add(FetchProfile.Item.ENVELOPE);
fp.add(FetchProfile.Item.BODY);
localSrcFolder.fetch(messages, fp, null);
localSrcFolder.copyMessages(messages, localDestFolder);
}
else
{
localSrcFolder.moveMessages(messages, localDestFolder);
for (String origUid : origUidMap.keySet())
{
for (MessagingListener l : getListeners())
{
l.messageUidChanged(account, srcFolder, origUid, origUidMap.get(origUid).getUid());
}
unsuppressMessage(account, srcFolder, origUid);
}
}
queueMoveOrCopy(account, srcFolder, destFolder, isCopy, origUidMap.keySet().toArray(EMPTY_STRING_ARRAY));
}
processPendingCommands(account);
}
catch (UnavailableStorageException e)
{
Log.i(K9.LOG_TAG, "Failed to move/copy message because storage is not available - trying again later.");
throw new UnavailableAccountException(e);
}
catch (MessagingException me)
{
addErrorMessage(account, null, me);
throw new RuntimeException("Error moving message", me);
}
}
public void expunge(final Account account, final String folder, final MessagingListener listener)
{
putBackground("expunge", null, new Runnable()
{
public void run()
{
queueExpunge(account, folder);
}
});
}
public void deleteDraft(final Account account, String uid)
{
LocalFolder localFolder = null;
try
{
LocalStore localStore = account.getLocalStore();
localFolder = localStore.getFolder(account.getDraftsFolderName());
localFolder.open(OpenMode.READ_WRITE);
Message message = localFolder.getMessage(uid);
if (message != null)
{
deleteMessages(new Message[] { message }, null);
}
}
catch (MessagingException me)
{
addErrorMessage(account, null, me);
}
finally
{
if (localFolder != null)
{
localFolder.close();
}
}
}
public void deleteMessages(final Message[] messages, final MessagingListener listener)
{
actOnMessages(messages, new MessageActor()
{
@Override
public void act(final Account account, final Folder folder,
final List<Message> messages)
{
for (Message message : messages)
{
suppressMessage(account, folder.getName(), message);
}
putBackground("deleteMessages", null, new Runnable()
{
public void run()
{
deleteMessagesSynchronous(account, folder.getName(), messages.toArray(EMPTY_MESSAGE_ARRAY), listener);
}
});
}
});
}
private void deleteMessagesSynchronous(final Account account, final String folder, final Message[] messages,
MessagingListener listener)
{
Folder localFolder = null;
Folder localTrashFolder = null;
String[] uids = getUidsFromMessages(messages);
try
{
//We need to make these callbacks before moving the messages to the trash
//as messages get a new UID after being moved
for (Message message : messages)
{
if (listener != null)
{
listener.messageDeleted(account, folder, message);
}
for (MessagingListener l : getListeners())
{
l.messageDeleted(account, folder, message);
}
}
Store localStore = account.getLocalStore();
localFolder = localStore.getFolder(folder);
if (folder.equals(account.getTrashFolderName()) || K9.FOLDER_NONE.equals(account.getTrashFolderName()))
{
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "Deleting messages in trash folder or trash set to -None-, not copying");
localFolder.setFlags(messages, new Flag[] { Flag.DELETED }, true);
}
else
{
localTrashFolder = localStore.getFolder(account.getTrashFolderName());
if (!localTrashFolder.exists())
{
localTrashFolder.create(Folder.FolderType.HOLDS_MESSAGES);
}
if (localTrashFolder.exists())
{
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "Deleting messages in normal folder, moving");
localFolder.moveMessages(messages, localTrashFolder);
}
}
for (MessagingListener l : getListeners())
{
l.folderStatusChanged(account, folder, localFolder.getUnreadMessageCount());
if (localTrashFolder != null)
{
l.folderStatusChanged(account, account.getTrashFolderName(), localTrashFolder.getUnreadMessageCount());
}
}
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "Delete policy for account " + account.getDescription() + " is " + account.getDeletePolicy());
if (folder.equals(account.getOutboxFolderName()))
{
for (Message message : messages)
{
// If the message was in the Outbox, then it has been copied to local Trash, and has
// to be copied to remote trash
PendingCommand command = new PendingCommand();
command.command = PENDING_COMMAND_APPEND;
command.arguments =
new String[]
{
account.getTrashFolderName(),
message.getUid()
};
queuePendingCommand(account, command);
}
processPendingCommands(account);
}
else if ( account.getDeletePolicy() == Account.DELETE_POLICY_ON_DELETE)
{
if (folder.equals(account.getTrashFolderName()))
{
queueSetFlag(account, folder, Boolean.toString(true), Flag.DELETED.toString(), uids);
}
else
{
queueMoveOrCopy(account, folder, account.getTrashFolderName(), false, uids);
}
processPendingCommands(account);
}
else if (account.getDeletePolicy() == Account.DELETE_POLICY_MARK_AS_READ)
{
queueSetFlag(account, folder, Boolean.toString(true), Flag.SEEN.toString(), uids);
processPendingCommands(account);
}
else
{
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "Delete policy " + account.getDeletePolicy() + " prevents delete from server");
}
for (String uid : uids)
{
unsuppressMessage(account, folder, uid);
}
}
catch (UnavailableStorageException e)
{
Log.i(K9.LOG_TAG, "Failed to delete message because storage is not available - trying again later.");
throw new UnavailableAccountException(e);
}
catch (MessagingException me)
{
addErrorMessage(account, null, me);
throw new RuntimeException("Error deleting message from local store.", me);
}
finally
{
if (localFolder != null)
{
localFolder.close();
}
if (localTrashFolder != null)
{
localTrashFolder.close();
}
}
}
private String[] getUidsFromMessages(Message[] messages)
{
String[] uids = new String[messages.length];
for (int i = 0; i < messages.length; i++)
{
uids[i] = messages[i].getUid();
}
return uids;
}
private void processPendingEmptyTrash(PendingCommand command, Account account) throws MessagingException
{
Store remoteStore = account.getRemoteStore();
Folder remoteFolder = remoteStore.getFolder(account.getTrashFolderName());
try
{
if (remoteFolder.exists())
{
remoteFolder.open(OpenMode.READ_WRITE);
remoteFolder.setFlags(new Flag [] { Flag.DELETED }, true);
if (Account.EXPUNGE_IMMEDIATELY.equals(account.getExpungePolicy()))
{
remoteFolder.expunge();
}
}
}
finally
{
if (remoteFolder != null)
{
remoteFolder.close();
}
}
}
public void emptyTrash(final Account account, MessagingListener listener)
{
putBackground("emptyTrash", listener, new Runnable()
{
public void run()
{
Folder localFolder = null;
try
{
Store localStore = account.getLocalStore();
localFolder = localStore.getFolder(account.getTrashFolderName());
localFolder.open(OpenMode.READ_WRITE);
localFolder.setFlags(new Flag[] { Flag.DELETED }, true);
for (MessagingListener l : getListeners())
{
l.emptyTrashCompleted(account);
}
List<String> args = new ArrayList<String>();
PendingCommand command = new PendingCommand();
command.command = PENDING_COMMAND_EMPTY_TRASH;
command.arguments = args.toArray(EMPTY_STRING_ARRAY);
queuePendingCommand(account, command);
processPendingCommands(account);
}
catch (UnavailableStorageException e)
{
Log.i(K9.LOG_TAG, "Failed to empty trash because storage is not available - trying again later.");
throw new UnavailableAccountException(e);
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "emptyTrash failed", e);
addErrorMessage(account, null, e);
}
finally
{
if (localFolder != null)
{
localFolder.close();
}
}
}
});
}
public void sendAlternate(final Context context, Account account, Message message)
{
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "About to load message " + account.getDescription() + ":" + message.getFolder().getName()
+ ":" + message.getUid() + " for sendAlternate");
loadMessageForView(account, message.getFolder().getName(),
message.getUid(), new MessagingListener()
{
@Override
public void loadMessageForViewBodyAvailable(Account account, String folder, String uid,
Message message)
{
if (K9.DEBUG)
Log.d(K9.LOG_TAG, "Got message " + account.getDescription() + ":" + folder
+ ":" + message.getUid() + " for sendAlternate");
try
{
Intent msg=new Intent(Intent.ACTION_SEND);
String quotedText = null;
Part part = MimeUtility.findFirstPartByMimeType(message,
"text/plain");
if (part == null)
{
part = MimeUtility.findFirstPartByMimeType(message, "text/html");
}
if (part != null)
{
quotedText = MimeUtility.getTextFromPart(part);
}
if (quotedText != null)
{
msg.putExtra(Intent.EXTRA_TEXT, quotedText);
}
msg.putExtra(Intent.EXTRA_SUBJECT, "Fwd: " + message.getSubject());
msg.setType("text/plain");
context.startActivity(Intent.createChooser(msg, context.getString(R.string.send_alternate_chooser_title)));
}
catch (MessagingException me)
{
Log.e(K9.LOG_TAG, "Unable to send email through alternate program", me);
}
}
});
}
/**
* Checks mail for one or multiple accounts. If account is null all accounts
* are checked.
*
* @param context context used to read preferences and access system services
* @param account the account to check, or null to check all accounts
* @param ignoreLastCheckedTime if true, sync even accounts and folders that are not yet due according to their check interval
* @param useManualWakeLock if true, acquire a partial wake lock for the duration of the check
* @param listener listener notified of progress
*/
public void checkMail(final Context context, final Account account,
final boolean ignoreLastCheckedTime,
final boolean useManualWakeLock,
final MessagingListener listener)
{
TracingWakeLock twakeLock = null;
if (useManualWakeLock)
{
TracingPowerManager pm = TracingPowerManager.getPowerManager(context);
twakeLock = pm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "K9 MessagingController.checkMail");
twakeLock.setReferenceCounted(false);
twakeLock.acquire(K9.MANUAL_WAKE_LOCK_TIMEOUT);
}
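// Final copy so the nested runnables below can release the lock once the whole
// check has run (see the "finalize sync" runnable at the end of this method).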
final TracingWakeLock wakeLock = twakeLock;
for (MessagingListener l : getListeners())
{
l.checkMailStarted(context, account);
}
putBackground("checkMail", listener, new Runnable()
{
public void run()
{
final NotificationManager notifMgr = (NotificationManager)context
.getSystemService(Context.NOTIFICATION_SERVICE);
try
{
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Starting mail check");
Preferences prefs = Preferences.getPreferences(context);
Account[] accounts;
if (account != null)
{
accounts = new Account[]
{
account
};
}
else
{
accounts = prefs.getAccounts();
}
for (final Account account : accounts)
{
if (!account.isAvailable(context))
{
if (K9.DEBUG)
{
Log.i(K9.LOG_TAG, "Skipping synchronizing unavailable account " + account.getDescription());
}
continue;
}
final long accountInterval = account.getAutomaticCheckIntervalMinutes() * 60 * 1000;
if (!ignoreLastCheckedTime && accountInterval <= 0)
{
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Skipping synchronizing account " + account.getDescription());
continue;
}
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Synchronizing account " + account.getDescription());
account.setRingNotified(false);
sendPendingMessages(account, listener);
try
{
Account.FolderMode aDisplayMode = account.getFolderDisplayMode();
Account.FolderMode aSyncMode = account.getFolderSyncMode();
Store localStore = account.getLocalStore();
for (final Folder folder : localStore.getPersonalNamespaces(false))
{
folder.open(Folder.OpenMode.READ_WRITE);
folder.refresh(prefs);
Folder.FolderClass fDisplayClass = folder.getDisplayClass();
Folder.FolderClass fSyncClass = folder.getSyncClass();
if (modeMismatch(aDisplayMode, fDisplayClass))
{
// Never sync a folder that isn't displayed
/*
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Not syncing folder " + folder.getName() +
" which is in display mode " + fDisplayClass + " while account is in display mode " + aDisplayMode);
*/
continue;
}
if (modeMismatch(aSyncMode, fSyncClass))
{
// Do not sync folders in the wrong class
/*
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Not syncing folder " + folder.getName() +
" which is in sync mode " + fSyncClass + " while account is in sync mode " + aSyncMode);
*/
continue;
}
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Folder " + folder.getName() + " was last synced @ " +
new Date(folder.getLastChecked()));
if (!ignoreLastCheckedTime && folder.getLastChecked() >
(System.currentTimeMillis() - accountInterval))
{
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Not syncing folder " + folder.getName()
+ ", previously synced @ " + new Date(folder.getLastChecked())
+ " which would be too recent for the account period");
continue;
}
putBackground("sync" + folder.getName(), null, new Runnable()
{
public void run()
{
LocalFolder tLocalFolder = null;
try
{
// In case multiple Commands get enqueued, don't run more than
// once
final LocalStore localStore = account.getLocalStore();
tLocalFolder = localStore.getFolder(folder.getName());
tLocalFolder.open(Folder.OpenMode.READ_WRITE);
if (!ignoreLastCheckedTime && tLocalFolder.getLastChecked() >
(System.currentTimeMillis() - accountInterval))
{
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Not running Command for folder " + folder.getName()
+ ", previously synced @ " + new Date(folder.getLastChecked())
+ " which would be too recent for the account period");
return;
}
if (account.isShowOngoing())
{
Notification notif = new Notification(R.drawable.ic_menu_refresh,
context.getString(R.string.notification_bg_sync_ticker, account.getDescription(), folder.getName()),
System.currentTimeMillis());
Intent intent = MessageList.actionHandleFolderIntent(context, account, K9.INBOX);
PendingIntent pi = PendingIntent.getActivity(context, 0, intent, 0);
notif.setLatestEventInfo(context, context.getString(R.string.notification_bg_sync_title), account.getDescription()
+ context.getString(R.string.notification_bg_title_separator) + folder.getName(), pi);
notif.flags = Notification.FLAG_ONGOING_EVENT;
if (K9.NOTIFICATION_LED_WHILE_SYNCING)
{
notif.flags |= Notification.FLAG_SHOW_LIGHTS;
notif.ledARGB = account.getNotificationSetting().getLedColor();
notif.ledOnMS = K9.NOTIFICATION_LED_FAST_ON_TIME;
notif.ledOffMS = K9.NOTIFICATION_LED_FAST_OFF_TIME;
}
notifMgr.notify(K9.FETCHING_EMAIL_NOTIFICATION - account.getAccountNumber(), notif);
}
try
{
synchronizeMailboxSynchronous(account, folder.getName(), listener, null);
}
finally
{
if (account.isShowOngoing())
{
notifMgr.cancel(K9.FETCHING_EMAIL_NOTIFICATION - account.getAccountNumber());
}
}
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Exception while processing folder " +
account.getDescription() + ":" + folder.getName(), e);
addErrorMessage(account, null, e);
}
finally
{
if (tLocalFolder != null)
{
tLocalFolder.close();
}
}
}
}
);
}
}
catch (MessagingException e)
{
Log.e(K9.LOG_TAG, "Unable to synchronize account " + account.getName(), e);
addErrorMessage(account, null, e);
}
finally
{
putBackground("clear notification flag for " + account.getDescription(), null, new Runnable()
{
public void run()
{
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Clearing notification flag for " + account.getDescription());
account.setRingNotified(false);
try
{
AccountStats stats = account.getStats(context);
if (stats == null || stats.unreadMessageCount == 0)
{
notifyAccountCancel(context, account);
}
}
catch (MessagingException e)
{
Log.e(K9.LOG_TAG, "Unable to getUnreadMessageCount for account: " + account, e);
}
}
}
);
}
}
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Unable to synchronize mail", e);
addErrorMessage(account, null, e);
}
putBackground("finalize sync", null, new Runnable()
{
public void run()
{
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Finished mail sync");
if (wakeLock != null)
{
wakeLock.release();
}
for (MessagingListener l : getListeners())
{
l.checkMailFinished(context, account);
}
}
}
);
}
});
}
public void compact(final Account account, final MessagingListener ml)
{
putBackground("compact:" + account.getDescription(), ml, new Runnable()
{
public void run()
{
try
{
LocalStore localStore = account.getLocalStore();
long oldSize = localStore.getSize();
localStore.compact();
long newSize = localStore.getSize();
if (ml != null)
{
ml.accountSizeChanged(account, oldSize, newSize);
}
for (MessagingListener l : getListeners())
{
l.accountSizeChanged(account, oldSize, newSize);
}
}
catch (UnavailableStorageException e)
{
Log.i(K9.LOG_TAG, "Failed to compact account because storage is not available - trying again later.");
throw new UnavailableAccountException(e);
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Failed to compact account " + account.getDescription(), e);
}
}
});
}
public void clear(final Account account, final MessagingListener ml)
{
putBackground("clear:" + account.getDescription(), ml, new Runnable()
{
public void run()
{
try
{
LocalStore localStore = account.getLocalStore();
long oldSize = localStore.getSize();
localStore.clear();
localStore.resetVisibleLimits(account.getDisplayCount());
long newSize = localStore.getSize();
AccountStats stats = new AccountStats();
stats.size = newSize;
stats.unreadMessageCount = 0;
stats.flaggedMessageCount = 0;
if (ml != null)
{
ml.accountSizeChanged(account, oldSize, newSize);
ml.accountStatusChanged(account, stats);
}
for (MessagingListener l : getListeners())
{
l.accountSizeChanged(account, oldSize, newSize);
l.accountStatusChanged(account, stats);
}
}
catch (UnavailableStorageException e)
{
Log.i(K9.LOG_TAG, "Failed to clear account because storage is not available - trying again later.");
throw new UnavailableAccountException(e);
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Failed to clear account " + account.getDescription(), e);
}
}
});
}
public void recreate(final Account account, final MessagingListener ml)
{
putBackground("recreate:" + account.getDescription(), ml, new Runnable()
{
public void run()
{
try
{
LocalStore localStore = account.getLocalStore();
long oldSize = localStore.getSize();
localStore.recreate();
localStore.resetVisibleLimits(account.getDisplayCount());
long newSize = localStore.getSize();
AccountStats stats = new AccountStats();
stats.size = newSize;
stats.unreadMessageCount = 0;
stats.flaggedMessageCount = 0;
if (ml != null)
{
ml.accountSizeChanged(account, oldSize, newSize);
ml.accountStatusChanged(account, stats);
}
for (MessagingListener l : getListeners())
{
l.accountSizeChanged(account, oldSize, newSize);
l.accountStatusChanged(account, stats);
}
}
catch (UnavailableStorageException e)
{
Log.i(K9.LOG_TAG, "Failed to recreate an account because storage is not available - trying again later.");
throw new UnavailableAccountException(e);
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Failed to recreate account " + account.getDescription(), e);
}
}
});
}
private boolean shouldNotifyForMessage(Account account, Message message)
{
// Do not notify if the user has notifications disabled, the message
// has already been read, or the account has no name yet (initial setup)
if (!account.isNotifyNewMail() || message.isSet(Flag.SEEN) || (account.getName() == null))
{
return false;
}
Folder folder = message.getFolder();
if (folder != null)
{
// No notification for new messages in Trash, Drafts, or Sent folder.
// But do notify if it's the INBOX (see issue 1817).
String folderName = folder.getName();
if (!K9.INBOX.equals(folderName) &&
(account.getTrashFolderName().equals(folderName)
|| account.getDraftsFolderName().equals(folderName)
|| account.getSentFolderName().equals(folderName)))
{
return false;
}
}
return true;
}
/** Creates a notification for new email messages, configuring the
* ringtone, lights, and vibration to be played.
*/
private boolean notifyAccount(Context context, Account account, Message message, int previousUnreadMessageCount, AtomicInteger newMessageCount)
{
// If we don't even have an account name, don't show the notification
// (This happens during initial account setup)
//
if (account.getName() == null)
{
return false;
}
// If we have a message, set the notification to "<From>: <Subject>"
StringBuilder messageNotice = new StringBuilder();
final KeyguardManager keyguardService = (KeyguardManager) context.getSystemService(Context.KEYGUARD_SERVICE);
try
{
if (message != null && message.getFrom() != null)
{
Address[] fromAddrs = message.getFrom();
String from = fromAddrs.length > 0 ? fromAddrs[0].toFriendly().toString() : null;
String subject = message.getSubject();
if (subject == null)
{
subject = context.getString(R.string.general_no_subject);
}
if (from != null)
{
// Show From: address by default
if (!account.isAnIdentity(fromAddrs))
{
messageNotice.append(from + ": " + subject);
}
// show To: if the message was sent from me
else
{
if (!account.isNotifySelfNewMail())
{
return false;
}
Address[] rcpts = message.getRecipients(Message.RecipientType.TO);
String to = rcpts.length > 0 ? rcpts[0].toFriendly().toString() : null;
if (to != null)
{
messageNotice.append(String.format(context.getString(R.string.message_to_fmt), to) +": "+subject);
}
else
{
messageNotice.append(context.getString(R.string.general_no_sender) + ": "+subject);
}
}
}
}
}
catch (MessagingException e)
{
Log.e(K9.LOG_TAG, "Unable to get message information for notification.", e);
}
// If privacy mode active and keyguard active
// OR
// If we could not set a per-message notification, revert to a default message
if ((K9.keyguardPrivacy() && keyguardService.inKeyguardRestrictedInputMode()) || messageNotice.length() == 0)
{
messageNotice = new StringBuilder(context.getString(R.string.notification_new_title));
}
NotificationManager notifMgr =
(NotificationManager)context.getSystemService(Context.NOTIFICATION_SERVICE);
Notification notif = new Notification(R.drawable.stat_notify_email_generic, messageNotice, System.currentTimeMillis());
notif.number = previousUnreadMessageCount + newMessageCount.get();
Intent i = FolderList.actionHandleNotification(context, account, message.getFolder().getName());
PendingIntent pi = PendingIntent.getActivity(context, 0, i, 0);
String accountNotice = context.getString(R.string.notification_new_one_account_fmt, notif.number, account.getDescription());
notif.setLatestEventInfo(context, accountNotice, messageNotice, pi);
// Only ring or vibrate if we have not done so already on this
// account and fetch
boolean ringAndVibrate = false;
if (!account.isRingNotified())
{
account.setRingNotified(true);
ringAndVibrate = true;
}
configureNotification(account.getNotificationSetting(), notif, ringAndVibrate);
notifMgr.notify(account.getAccountNumber(), notif);
return true;
}
/**
* @param setting
* Configuration template. Never <code>null</code>.
* @param notification
* Object to configure. Never <code>null</code>.
* @param ringAndVibrate
* <code>true</code> if ringtone/vibration are allowed,
* <code>false</code> otherwise.
*/
private void configureNotification(final NotificationSetting setting, final Notification notification, final boolean ringAndVibrate)
{
if (ringAndVibrate)
{
if (setting.shouldRing())
{
String ringtone = setting.getRingtone();
notification.sound = TextUtils.isEmpty(ringtone) ? null : Uri.parse(ringtone);
notification.audioStreamType = AudioManager.STREAM_NOTIFICATION;
}
if (setting.isVibrate())
{
long[] pattern = getVibratePattern(setting.getVibratePattern(), setting.getVibrateTimes());
notification.vibrate = pattern;
}
}
if (setting.isLed())
{
notification.flags |= Notification.FLAG_SHOW_LIGHTS;
notification.ledARGB = setting.getLedColor();
notification.ledOnMS = K9.NOTIFICATION_LED_ON_TIME;
notification.ledOffMS = K9.NOTIFICATION_LED_OFF_TIME;
}
}
/**
* Fetch a vibration pattern.
*
* @param vibratePattern Vibration pattern index to use.
* @param vibrateTimes Number of times to repeat the vibration pattern.
* @return Pattern repeated the number of times requested.
*/
public static long[] getVibratePattern(int vibratePattern, int vibrateTimes)
{
// These are "off, on" patterns, specified in milliseconds
long[] pattern0 = new long[] {300,200}; // like the default pattern
long[] pattern1 = new long[] {100,200};
long[] pattern2 = new long[] {100,500};
long[] pattern3 = new long[] {200,200};
long[] pattern4 = new long[] {200,500};
long[] pattern5 = new long[] {500,500};
long[] selectedPattern = pattern0; //default pattern
switch (vibratePattern)
{
case 1:
selectedPattern = pattern1;
break;
case 2:
selectedPattern = pattern2;
break;
case 3:
selectedPattern = pattern3;
break;
case 4:
selectedPattern = pattern4;
break;
case 5:
selectedPattern = pattern5;
break;
}
long[] repeatedPattern = new long[selectedPattern.length * vibrateTimes];
for (int n = 0; n < vibrateTimes; n++)
{
System.arraycopy(selectedPattern, 0, repeatedPattern, n * selectedPattern.length, selectedPattern.length);
}
// Do not wait before starting the vibration pattern.
repeatedPattern[0] = 0;
return repeatedPattern;
}
/** Cancel a notification of new email messages */
public void notifyAccountCancel(Context context, Account account)
{
NotificationManager notifMgr =
(NotificationManager)context.getSystemService(Context.NOTIFICATION_SERVICE);
notifMgr.cancel(account.getAccountNumber());
notifMgr.cancel(-1000 - account.getAccountNumber());
}
public Message saveDraft(final Account account, final Message message)
{
Message localMessage = null;
try
{
LocalStore localStore = account.getLocalStore();
LocalFolder localFolder = localStore.getFolder(account.getDraftsFolderName());
localFolder.open(OpenMode.READ_WRITE);
localFolder.appendMessages(new Message[]
{
message
});
localMessage = localFolder.getMessage(message.getUid());
localMessage.setFlag(Flag.X_DOWNLOADED_FULL, true);
PendingCommand command = new PendingCommand();
command.command = PENDING_COMMAND_APPEND;
command.arguments = new String[]
{
localFolder.getName(),
localMessage.getUid()
};
queuePendingCommand(account, command);
processPendingCommands(account);
}
catch (MessagingException e)
{
Log.e(K9.LOG_TAG, "Unable to save message as draft.", e);
addErrorMessage(account, null, e);
}
return localMessage;
}
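// Returns true when a folder's class is excluded by the account-level folder mode:
// NONE excludes everything, FIRST_CLASS excludes anything that is not first class,
// FIRST_AND_SECOND_CLASS excludes anything outside those two classes, and
// NOT_SECOND_CLASS excludes second-class folders.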
public boolean modeMismatch(Account.FolderMode aMode, Folder.FolderClass fMode)
{
if (aMode == Account.FolderMode.NONE
|| (aMode == Account.FolderMode.FIRST_CLASS &&
fMode != Folder.FolderClass.FIRST_CLASS)
|| (aMode == Account.FolderMode.FIRST_AND_SECOND_CLASS &&
fMode != Folder.FolderClass.FIRST_CLASS &&
fMode != Folder.FolderClass.SECOND_CLASS)
|| (aMode == Account.FolderMode.NOT_SECOND_CLASS &&
fMode == Folder.FolderClass.SECOND_CLASS))
{
return true;
}
else
{
return false;
}
}
static AtomicInteger sequencing = new AtomicInteger(0);
class Command implements Comparable<Command>
{
public Runnable runnable;
public MessagingListener listener;
public String description;
boolean isForeground;
int sequence = sequencing.getAndIncrement();
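// Foreground commands always sort ahead of background ones; within the same
// priority, commands run in FIFO order using the sequence number.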
@Override
public int compareTo(Command other)
{
if (other.isForeground && !isForeground)
{
return 1;
}
else if (!other.isForeground && isForeground)
{
return -1;
}
else
{
return (sequence - other.sequence);
}
}
}
public MessagingListener getCheckMailListener()
{
return checkMailListener;
}
public void setCheckMailListener(MessagingListener checkMailListener)
{
if (this.checkMailListener != null)
{
removeListener(this.checkMailListener);
}
this.checkMailListener = checkMailListener;
if (this.checkMailListener != null)
{
addListener(this.checkMailListener);
}
}
public SORT_TYPE getSortType()
{
return sortType;
}
public void setSortType(SORT_TYPE sortType)
{
this.sortType = sortType;
}
public boolean isSortAscending(SORT_TYPE sortType)
{
Boolean sortAsc = sortAscending.get(sortType);
if (sortAsc == null)
{
return sortType.isDefaultAscending();
}
else return sortAsc;
}
public void setSortAscending(SORT_TYPE sortType, boolean nsortAscending)
{
sortAscending.put(sortType, nsortAscending);
}
public Collection<Pusher> getPushers()
{
return pushers.values();
}
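// Restarts push for the given account: stops any existing pusher, collects the local
// folders whose display and push classes match the account's folder modes (skipping
// the error and outbox folders and truncating to the configured maximum), and starts
// the remote store's pusher for those folders if the store supports pushing.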
public boolean setupPushing(final Account account)
{
try
{
Pusher previousPusher = pushers.remove(account);
if (previousPusher != null)
{
previousPusher.stop();
}
Preferences prefs = Preferences.getPreferences(mApplication);
Account.FolderMode aDisplayMode = account.getFolderDisplayMode();
Account.FolderMode aPushMode = account.getFolderPushMode();
List<String> names = new ArrayList<String>();
Store localStore = account.getLocalStore();
for (final Folder folder : localStore.getPersonalNamespaces(false))
{
if (folder.getName().equals(account.getErrorFolderName())
|| folder.getName().equals(account.getOutboxFolderName()))
{
/*
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Not pushing folder " + folder.getName() +
" which should never be pushed");
*/
continue;
}
folder.open(Folder.OpenMode.READ_WRITE);
folder.refresh(prefs);
Folder.FolderClass fDisplayClass = folder.getDisplayClass();
Folder.FolderClass fPushClass = folder.getPushClass();
if (modeMismatch(aDisplayMode, fDisplayClass))
{
// Never push a folder that isn't displayed
/*
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Not pushing folder " + folder.getName() +
" which is in display class " + fDisplayClass + " while account is in display mode " + aDisplayMode);
*/
continue;
}
if (modeMismatch(aPushMode, fPushClass))
{
// Do not push folders in the wrong class
/*
if (K9.DEBUG)
Log.v(K9.LOG_TAG, "Not pushing folder " + folder.getName() +
" which is in push mode " + fPushClass + " while account is in push mode " + aPushMode);
*/
continue;
}
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Starting pusher for " + account.getDescription() + ":" + folder.getName());
names.add(folder.getName());
}
if (names.size() > 0)
{
PushReceiver receiver = new MessagingControllerPushReceiver(mApplication, account, this);
int maxPushFolders = account.getMaxPushFolders();
if (names.size() > maxPushFolders)
{
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Count of folders to push for account " + account.getDescription() + " is " + names.size()
+ ", greater than limit of " + maxPushFolders + ", truncating");
names = names.subList(0, maxPushFolders);
}
try
{
Store store = account.getRemoteStore();
if (!store.isPushCapable())
{
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Account " + account.getDescription() + " is not push capable, skipping");
return false;
}
Pusher pusher = store.getPusher(receiver);
if (pusher != null)
{
Pusher oldPusher = pushers.putIfAbsent(account, pusher);
if (oldPusher == null)
{
pusher.start(names);
}
}
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Could not get remote store", e);
return false;
}
return true;
}
else
{
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "No folders are configured for pushing in account " + account.getDescription());
return false;
}
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Got exception while setting up pushing", e);
}
return false;
}
public void stopAllPushing()
{
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Stopping all pushers");
Iterator<Pusher> iter = pushers.values().iterator();
while (iter.hasNext())
{
Pusher pusher = iter.next();
iter.remove();
pusher.stop();
}
}
public void messagesArrived(final Account account, final Folder remoteFolder, final List<Message> messages, final boolean flagSyncOnly)
{
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "Got new pushed email messages for account " + account.getDescription()
+ ", folder " + remoteFolder.getName());
final CountDownLatch latch = new CountDownLatch(1);
putBackground("Push messageArrived of account " + account.getDescription()
+ ", folder " + remoteFolder.getName(), null, new Runnable()
{
public void run()
{
LocalFolder localFolder = null;
try
{
LocalStore localStore = account.getLocalStore();
localFolder= localStore.getFolder(remoteFolder.getName());
localFolder.open(OpenMode.READ_WRITE);
account.setRingNotified(false);
int newCount = downloadMessages(account, remoteFolder, localFolder, messages, flagSyncOnly);
int unreadMessageCount = setLocalUnreadCountToRemote(localFolder, remoteFolder, messages.size());
setLocalFlaggedCountToRemote(localFolder, remoteFolder);
localFolder.setLastPush(System.currentTimeMillis());
localFolder.setStatus(null);
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "messagesArrived newCount = " + newCount + ", unread count = " + unreadMessageCount);
if (unreadMessageCount == 0)
{
notifyAccountCancel(mApplication, account);
}
for (MessagingListener l : getListeners())
{
l.folderStatusChanged(account, remoteFolder.getName(), unreadMessageCount);
}
}
catch (Exception e)
{
String rootMessage = getRootCauseMessage(e);
String errorMessage = "Push failed: " + rootMessage;
try
{
localFolder.setStatus(errorMessage);
}
catch (Exception se)
{
Log.e(K9.LOG_TAG, "Unable to set failed status on localFolder", se);
}
for (MessagingListener l : getListeners())
{
l.synchronizeMailboxFailed(account, remoteFolder.getName(), errorMessage);
}
addErrorMessage(account, null, e);
}
finally
{
if (localFolder != null)
{
try
{
localFolder.close();
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Unable to close localFolder", e);
}
}
latch.countDown();
}
}
});
try
{
latch.await();
}
catch (Exception e)
{
Log.e(K9.LOG_TAG, "Interrupted while awaiting latch release", e);
}
if (K9.DEBUG)
Log.i(K9.LOG_TAG, "MessagingController.messagesArrivedLatch released");
}
enum MemorizingState { STARTED, FINISHED, FAILED };
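// Snapshot of the last reported listener state (sync/send/push/pending-command) for one
// account/folder pair, keyed by getMemoryKey(); the folder may be null for account-wide
// events such as sending or pending-command processing.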
class Memory
{
Account account;
String folderName;
MemorizingState syncingState = null;
MemorizingState sendingState = null;
MemorizingState pushingState = null;
MemorizingState processingState = null;
String failureMessage = null;
int syncingTotalMessagesInMailbox;
int syncingNumNewMessages;
int folderCompleted = 0;
int folderTotal = 0;
String processingCommandTitle = null;
Memory(Account nAccount, String nFolderName)
{
account = nAccount;
folderName = nFolderName;
}
String getKey()
{
return getMemoryKey(account, folderName);
}
}
static String getMemoryKey(Account taccount, String tfolderName)
{
return taccount.getDescription() + ":" + tfolderName;
}
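// Listener that remembers the most recent sync, send, push and pending-command state
// per account and folder so that a newly attached listener can be brought up to date
// via refreshOther().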
class MemorizingListener extends MessagingListener
{
HashMap<String, Memory> memories = new HashMap<String, Memory>(31);
Memory getMemory(Account account, String folderName)
{
Memory memory = memories.get(getMemoryKey(account, folderName));
if (memory == null)
{
memory = new Memory(account, folderName);
memories.put(memory.getKey(), memory);
}
return memory;
}
@Override
public synchronized void synchronizeMailboxStarted(Account account, String folder)
{
Memory memory = getMemory(account, folder);
memory.syncingState = MemorizingState.STARTED;
memory.folderCompleted = 0;
memory.folderTotal = 0;
}
@Override
public synchronized void synchronizeMailboxFinished(Account account, String folder,
int totalMessagesInMailbox, int numNewMessages)
{
Memory memory = getMemory(account, folder);
memory.syncingState = MemorizingState.FINISHED;
memory.syncingTotalMessagesInMailbox = totalMessagesInMailbox;
memory.syncingNumNewMessages = numNewMessages;
}
@Override
public synchronized void synchronizeMailboxFailed(Account account, String folder,
String message)
{
Memory memory = getMemory(account, folder);
memory.syncingState = MemorizingState.FAILED;
memory.failureMessage = message;
}
synchronized void refreshOther(MessagingListener other)
{
if (other != null)
{
Memory syncStarted = null;
Memory sendStarted = null;
Memory processingStarted = null;
for (Memory memory : memories.values())
{
if (memory.syncingState != null)
{
switch (memory.syncingState)
{
case STARTED:
syncStarted = memory;
break;
case FINISHED:
other.synchronizeMailboxFinished(memory.account, memory.folderName,
memory.syncingTotalMessagesInMailbox, memory.syncingNumNewMessages);
break;
case FAILED:
other.synchronizeMailboxFailed(memory.account, memory.folderName,
memory.failureMessage);
break;
}
}
if (memory.sendingState != null)
{
switch (memory.sendingState)
{
case STARTED:
sendStarted = memory;
break;
case FINISHED:
other.sendPendingMessagesCompleted(memory.account);
break;
case FAILED:
other.sendPendingMessagesFailed(memory.account);
break;
}
}
if (memory.pushingState != null)
{
switch (memory.pushingState)
{
case STARTED:
other.setPushActive(memory.account, memory.folderName, true);
break;
case FINISHED:
other.setPushActive(memory.account, memory.folderName, false);
break;
}
}
if (memory.processingState != null)
{
switch (memory.processingState)
{
case STARTED:
processingStarted = memory;
break;
case FINISHED:
case FAILED:
other.pendingCommandsFinished(memory.account);
break;
}
}
}
Memory somethingStarted = null;
if (syncStarted != null)
{
other.synchronizeMailboxStarted(syncStarted.account, syncStarted.folderName);
somethingStarted = syncStarted;
}
if (sendStarted != null)
{
other.sendPendingMessagesStarted(sendStarted.account);
somethingStarted = sendStarted;
}
if (processingStarted != null)
{
other.pendingCommandsProcessing(processingStarted.account);
if (processingStarted.processingCommandTitle != null)
{
other.pendingCommandStarted(processingStarted.account, processingStarted.processingCommandTitle);
}
else
{
other.pendingCommandCompleted(processingStarted.account, processingStarted.processingCommandTitle);
}
somethingStarted = processingStarted;
}
if (somethingStarted != null && somethingStarted.folderTotal > 0)
{
other.synchronizeMailboxProgress(somethingStarted.account, somethingStarted.folderName, somethingStarted.folderCompleted, somethingStarted.folderTotal);
}
}
}
@Override
public synchronized void setPushActive(Account account, String folderName, boolean active)
{
Memory memory = getMemory(account, folderName);
memory.pushingState = (active ? MemorizingState.STARTED : MemorizingState.FINISHED);
}
@Override
public synchronized void sendPendingMessagesStarted(Account account)
{
Memory memory = getMemory(account, null);
memory.sendingState = MemorizingState.STARTED;
memory.folderCompleted = 0;
memory.folderTotal = 0;
}
@Override
public synchronized void sendPendingMessagesCompleted(Account account)
{
Memory memory = getMemory(account, null);
memory.sendingState = MemorizingState.FINISHED;
}
@Override
public synchronized void sendPendingMessagesFailed(Account account)
{
Memory memory = getMemory(account, null);
memory.sendingState = MemorizingState.FAILED;
}
@Override
public synchronized void synchronizeMailboxProgress(Account account, String folderName, int completed, int total)
{
Memory memory = getMemory(account, folderName);
memory.folderCompleted = completed;
memory.folderTotal = total;
}
@Override
public synchronized void pendingCommandsProcessing(Account account)
{
Memory memory = getMemory(account, null);
memory.processingState = MemorizingState.STARTED;
memory.folderCompleted = 0;
memory.folderTotal = 0;
}
@Override
public synchronized void pendingCommandsFinished(Account account)
{
Memory memory = getMemory(account, null);
memory.processingState = MemorizingState.FINISHED;
}
@Override
public synchronized void pendingCommandStarted(Account account, String commandTitle)
{
Memory memory = getMemory(account, null);
memory.processingCommandTitle = commandTitle;
}
@Override
public synchronized void pendingCommandCompleted(Account account, String commandTitle)
{
Memory memory = getMemory(account, null);
memory.processingCommandTitle = null;
}
}
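// Groups the given messages by account and folder, then invokes the actor once per
// (account, folder) batch.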
private void actOnMessages(Message[] messages, MessageActor actor)
{
Map<Account, Map<Folder, List<Message>>> accountMap = new HashMap<Account, Map<Folder, List<Message>>>();
for (Message message : messages)
{
Folder folder = message.getFolder();
Account account = folder.getAccount();
Map<Folder, List<Message>> folderMap = accountMap.get(account);
if (folderMap == null)
{
folderMap = new HashMap<Folder, List<Message>>();
accountMap.put(account, folderMap);
}
List<Message> messageList = folderMap.get(folder);
if (messageList == null)
{
messageList = new LinkedList<Message>();
folderMap.put(folder, messageList);
}
messageList.add(message);
}
for (Map.Entry<Account, Map<Folder, List<Message>>> entry : accountMap.entrySet())
{
Account account = entry.getKey();
//account.refresh(Preferences.getPreferences(K9.app));
Map<Folder, List<Message>> folderMap = entry.getValue();
for (Map.Entry<Folder, List<Message>> folderEntry : folderMap.entrySet())
{
Folder folder = folderEntry.getKey();
List<Message> messageList = folderEntry.getValue();
actor.act(account, folder, messageList);
}
}
}
interface MessageActor
{
public void act(final Account account, final Folder folder, final List<Message> messages);
}
}
| No longer delete the Outbox every time it's empty - it makes it somewhat
more difficult to work with. If we want to hide it when it's empty,
that's fine, but it's not necessary to delete it.
| src/com/fsck/k9/controller/MessagingController.java | No longer delete the Outbox every time it's empty - it makes it somewhat more difficult to work with. If we want to hide it when it's empty, that's fine, but it's not necessary to delete it.
<ide> }
<ide> if (localFolder.getMessageCount() == 0)
<ide> {
<del> localFolder.delete(false);
<add> // No longer delete the empty local outbox every time we finish sending mail
<add> // There's no real win to it and it makes the folder selection UI extra stupid
<add> // (We'd need a textentry widget to set the Outbox folder rather than a folder select widget)
<add> // localFolder.delete(false);
<ide> }
<ide> for (MessagingListener l : getListeners())
<ide> { |
|
Java | agpl-3.0 | 5908d09e886a92f4283f3d31cdcfc4a883c8c0f8 | 0 | leedonghn4/cbio-portal-webgl,bihealth/cbioportal,istemi-bahceci/cbioportal,jjgao/cbioportal,sheridancbio/cbioportal,cBioPortal/cbioportal,HectorWon/cbioportal,leedonghn4/cbioportal,kalletlak/cbioportal,onursumer/cbioportal,adamabeshouse/cbioportal,inodb/cbioportal,zhx828/cbioportal,zheins/cbioportal,gsun83/cbioportal,d3b-center/pedcbioportal,mandawilson/cbioportal,angelicaochoa/cbioportal,bengusty/cbioportal,d3b-center/pedcbioportal,leedonghn4/cbioportal,yichaoS/cbioportal,zhx828/cbioportal,mandawilson/cbioportal,leedonghn4/cbio-portal-webgl,jjgao/cbioportal,j-hudecek/cbioportal,bengusty/cbioportal,xmao/cbioportal,n1zea144/cbioportal,bihealth/cbioportal,inodb/cbioportal,inodb/cbioportal,j-hudecek/cbioportal,istemi-bahceci/cbioportal,kalletlak/cbioportal,angelicaochoa/cbioportal,pughlab/cbioportal,mandawilson/cbioportal,HectorWon/cbioportal,onursumer/cbioportal,shrumit/cbioportal-gsoc-final,fcriscuo/cbioportal,zheins/cbioportal,IntersectAustralia/cbioportal,onursumer/cbioportal,shrumit/cbioportal-gsoc-final,inodb/cbioportal,pughlab/cbioportal,IntersectAustralia/cbioportal,HectorWon/cbioportal,xmao/cbioportal,yichaoS/cbioportal,istemi-bahceci/cbioportal,jjgao/cbioportal,pughlab/cbioportal,leedonghn4/cbioportal,gsun83/cbioportal,d3b-center/pedcbioportal,pughlab/cbioportal,zhx828/cbioportal,sheridancbio/cbioportal,adamabeshouse/cbioportal,cBioPortal/cbioportal,adamabeshouse/cbioportal,sheridancbio/cbioportal,leedonghn4/cbioportal,bihealth/cbioportal,bihealth/cbioportal,xmao/cbioportal,kalletlak/cbioportal,shrumit/cbioportal-gsoc-final,holtgrewe/cbioportal,jjgao/cbioportal,yichaoS/cbioportal,zhx828/cbioportal,d3b-center/pedcbioportal,yichaoS/cbioportal,angelicaochoa/cbioportal,zheins/cbioportal,bihealth/cbioportal,adamabeshouse/cbioportal,j-hudecek/cbioportal,bihealth/cbioportal,IntersectAustralia/cbioportal,istemi-bahceci/cbioportal,mandawilson/cbioportal,holtgrewe/cbioportal,mandawilson/cbioportal,gsun83/cbioportal,gsun83/cbioportal,kalletlak/cbioportal,n1zea144/cbioportal,angelicaochoa/cbioportal,holtgrewe/cbioportal,leedonghn4/cbio-portal-webgl,yichaoS/cbioportal,fcriscuo/cbioportal,xmao/cbioportal,yichaoS/cbioportal,IntersectAustralia/cbioportal,fcriscuo/cbioportal,n1zea144/cbioportal,kalletlak/cbioportal,zheins/cbioportal,jjgao/cbioportal,leedonghn4/cbioportal,sheridancbio/cbioportal,inodb/cbioportal,kalletlak/cbioportal,fcriscuo/cbioportal,bengusty/cbioportal,zhx828/cbioportal,adamabeshouse/cbioportal,bengusty/cbioportal,IntersectAustralia/cbioportal,n1zea144/cbioportal,cBioPortal/cbioportal,HectorWon/cbioportal,IntersectAustralia/cbioportal,HectorWon/cbioportal,bengusty/cbioportal,adamabeshouse/cbioportal,angelicaochoa/cbioportal,j-hudecek/cbioportal,zheins/cbioportal,shrumit/cbioportal-gsoc-final,zheins/cbioportal,mandawilson/cbioportal,inodb/cbioportal,fcriscuo/cbioportal,pughlab/cbioportal,inodb/cbioportal,bengusty/cbioportal,HectorWon/cbioportal,shrumit/cbioportal-gsoc-final,onursumer/cbioportal,holtgrewe/cbioportal,shrumit/cbioportal-gsoc-final,n1zea144/cbioportal,sheridancbio/cbioportal,pughlab/cbioportal,IntersectAustralia/cbioportal,j-hudecek/cbioportal,onursumer/cbioportal,jjgao/cbioportal,j-hudecek/cbioportal,leedonghn4/cbio-portal-webgl,leedonghn4/cbioportal,shrumit/cbioportal-gsoc-final,istemi-bahceci/cbioportal,angelicaochoa/cbioportal,jjgao/cbioportal,cBioPortal/cbioportal,leedonghn4/cbio-portal-webgl,gsun83/cbioportal,n1zea144/cbioportal,yichaoS/cbioportal,istemi-bahceci/cbioportal,onursumer/cbioportal,holtgr
ewe/cbioportal,xmao/cbioportal,holtgrewe/cbioportal,cBioPortal/cbioportal,gsun83/cbioportal,sheridancbio/cbioportal,zhx828/cbioportal,kalletlak/cbioportal,n1zea144/cbioportal,angelicaochoa/cbioportal,fcriscuo/cbioportal,mandawilson/cbioportal,xmao/cbioportal,d3b-center/pedcbioportal,zhx828/cbioportal,d3b-center/pedcbioportal,gsun83/cbioportal,adamabeshouse/cbioportal,d3b-center/pedcbioportal,bihealth/cbioportal,pughlab/cbioportal,cBioPortal/cbioportal,leedonghn4/cbio-portal-webgl | package org.mskcc.portal.mutation.diagram.pfam;
import java.io.IOException;
import java.util.List;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.codehaus.jackson.map.ObjectMapper;
import org.mskcc.portal.mutation.diagram.IdMappingService;
import org.mskcc.portal.mutation.diagram.Mutation;
import org.mskcc.portal.mutation.diagram.MutationService;
import com.google.common.collect.ImmutableList;
import com.google.inject.Inject;
import com.google.inject.Singleton;
/**
* Mutation diagram data servlet.
*/
@Singleton
public final class MutationDiagramDataServlet extends HttpServlet {
/** Default serial version UID. */
private static final long serialVersionUID = 1L;
private final ObjectMapper objectMapper;
private final FeatureService featureService;
private final IdMappingService idMappingService;
private final MutationService mutationService;
@Inject
public MutationDiagramDataServlet(final ObjectMapper objectMapper, final FeatureService featureService, final IdMappingService idMappingService, final MutationService mutationService) {
this.objectMapper = objectMapper;
this.featureService = featureService;
this.idMappingService = idMappingService;
this.mutationService = mutationService;
}
@Override
protected void doPost(final HttpServletRequest request, final HttpServletResponse response) throws ServletException, IOException {
// todo: check and sanitize hugoGeneSymbol if necessary
String hugoGeneSymbol = request.getParameter("hugoGeneSymbol");
List<String> uniProtIds = idMappingService.getUniProtIds(hugoGeneSymbol);
String uniProtId = uniProtIds.get(0); // uh oh: assumes the id mapping returned at least one UniProt id
List<Sequence> sequences = featureService.getFeatures(uniProtId);
if (!sequences.isEmpty()) {
Sequence sequence = sequences.get(0);
List<Mutation> mutations = mutationService.getMutations(hugoGeneSymbol);
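// Each mutation is rendered as a diamond-headed markup at its residue position;
// the metadata map supplies the details shown for that marker.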
for (Mutation mutation : mutations) {
Markup markup = new Markup();
markup.setDisplay("true"); // may need to be boolean
markup.setStart(mutation.getLocation());
markup.setColour(ImmutableList.of("#f36"));
markup.setLineColour("#666");
markup.setHeadStyle("diamond");
markup.setV_align("top");
markup.setType("mutation");
markup.getMetadata().put("count", mutation.getCount());
markup.getMetadata().put("type", mutation.getLabel());
markup.getMetadata().put("description", "Mutation: " + mutation.getLabel() + " (N=" + mutation.getCount() + ")");
markup.getMetadata().put("start", mutation.getLocation());
markup.getMetadata().put("database", "cBio Portal");
sequence.getMarkups().add(markup);
}
}
response.setContentType("application/json");
objectMapper.writeValue(response.getWriter(), sequences);
}
}
| portal/src/org/mskcc/portal/mutation/diagram/pfam/MutationDiagramDataServlet.java | package org.mskcc.portal.mutation.diagram.pfam;
import java.io.IOException;
import java.util.List;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.codehaus.jackson.map.ObjectMapper;
import org.mskcc.portal.mutation.diagram.IdMappingService;
import org.mskcc.portal.mutation.diagram.Mutation;
import org.mskcc.portal.mutation.diagram.MutationService;
import com.google.common.collect.ImmutableList;
import com.google.inject.Inject;
import com.google.inject.Singleton;
/**
* Mutation diagram data servlet.
*/
@Singleton
public final class MutationDiagramDataServlet extends HttpServlet {
/** Default serial version UID. */
private static final long serialVersionUID = 1L;
private final ObjectMapper objectMapper;
private final FeatureService featureService;
private final IdMappingService idMappingService;
private final MutationService mutationService;
@Inject
public MutationDiagramDataServlet(final ObjectMapper objectMapper, final FeatureService featureService, final IdMappingService idMappingService, final MutationService mutationService) {
this.objectMapper = objectMapper;
this.featureService = featureService;
this.idMappingService = idMappingService;
this.mutationService = mutationService;
}
@Override
protected void doPost(final HttpServletRequest request, final HttpServletResponse response) throws ServletException, IOException {
// todo: check and sanitize hugoGeneSymbol if necessary
String hugoGeneSymbol = request.getParameter("hugoGeneSymbol");
List<String> uniProtIds = idMappingService.getUniProtIds(hugoGeneSymbol);
String uniProtId = uniProtIds.get(0); // uh oh: assumes the id mapping returned at least one UniProt id
List<Sequence> sequences = featureService.getFeatures(uniProtId);
if (!sequences.isEmpty()) {
Sequence sequence = sequences.get(0);
List<Mutation> mutations = mutationService.getMutations(hugoGeneSymbol);
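// Each mutation is rendered as a diamond-headed markup at its residue position;
// the metadata map supplies the details shown for that marker.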
for (Mutation mutation : mutations) {
Markup markup = new Markup();
markup.setDisplay("true"); // may need to be boolean
markup.setStart(mutation.getLocation());
markup.setColour(ImmutableList.of("#f36"));
markup.setLineColour("#666");
markup.setHeadStyle("diamond");
markup.setV_align("top");
markup.getMetadata().put("description", "Mutation: " + mutation.getLabel() + " (N=" + mutation.getCount() + ")");
markup.getMetadata().put("type", "Mutation: " + mutation.getLabel() + " (N=" + mutation.getCount() + ")");
markup.getMetadata().put("start", mutation.getLocation());
markup.getMetadata().put("database", "cBio Portal");
sequence.getMarkups().add(markup);
}
}
response.setContentType("application/json");
objectMapper.writeValue(response.getWriter(), sequences);
}
}
| adding additional details to mutation markups
| portal/src/org/mskcc/portal/mutation/diagram/pfam/MutationDiagramDataServlet.java | adding additional details to mutation markups | <ide><path>ortal/src/org/mskcc/portal/mutation/diagram/pfam/MutationDiagramDataServlet.java
<ide> markup.setLineColour("#666");
<ide> markup.setHeadStyle("diamond");
<ide> markup.setV_align("top");
<add> markup.setType("mutation");
<add> markup.getMetadata().put("count", mutation.getCount());
<add> markup.getMetadata().put("type", mutation.getLabel());
<ide> markup.getMetadata().put("description", "Mutation: " + mutation.getLabel() + " (N=" + mutation.getCount() + ")");
<del> markup.getMetadata().put("type", "Mutation: " + mutation.getLabel() + " (N=" + mutation.getCount() + ")");
<ide> markup.getMetadata().put("start", mutation.getLocation());
<ide> markup.getMetadata().put("database", "cBio Portal");
<ide> |
|
Java | mit | 6a071c5cb2b95542c3fd9cec8fac8334fa4fbfe0 | 0 | diokey/sunshine | package com.example.diokey.sunshine.app;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
/**
* Created by diokey on 2/6/15.
*
* A placeholder fragment containing a simple view.
*/
public class ForecastFragment extends Fragment {
ArrayAdapter<String> adapter = null;
ListView listView = null;
public ForecastFragment() {
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
this.setHasOptionsMenu(true);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_main, container, false);
List<String> foreCastItems = new ArrayList<String>();
foreCastItems.add("Today - Sunny - 88/64");
foreCastItems.add("Tomorrow - Foggy - 70/46");
foreCastItems.add("Wednesday - Sunny - 88/64");
foreCastItems.add("Thursday - Rainy - 65/46");
foreCastItems.add("Friday - Snowy - 4/-4");
foreCastItems.add("Saturday - Cloudy - 18/14");
//create an array adapter
adapter = new ArrayAdapter<String>(getActivity(), R.layout.list_item_forecast, R.id.list_item_forecast_text_view, foreCastItems);
listView = (ListView) rootView.findViewById(R.id.list_view_forecast);
listView.setAdapter(adapter);
return rootView;
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
super.onCreateOptionsMenu(menu, inflater);
inflater.inflate(R.menu.forecastfragement,menu);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.refresh) {
FetchWeatherTask task = new FetchWeatherTask();
task.execute("94043");
return true;
}
return super.onOptionsItemSelected(item);
}
/* The date/time conversion code is going to be moved outside the asynctask later,
* so for convenience we're breaking it out into its own method now.
*/
private String getReadableDateString(long time){
// Because the API returns a unix timestamp (measured in seconds),
// it must be converted to milliseconds in order to be converted to a valid date.
Date date = new Date(time * 1000);
SimpleDateFormat format = new SimpleDateFormat("E, MMM d");
return format.format(date).toString();
}
/**
* Prepare the weather high/lows for presentation.
*/
private String formatHighLows(double high, double low) {
// For presentation, assume the user doesn't care about tenths of a degree.
long roundedHigh = Math.round(high);
long roundedLow = Math.round(low);
String highLowStr = roundedHigh + "/" + roundedLow;
return highLowStr;
}
/**
* Take the String representing the complete forecast in JSON Format and
* pull out the data we need to construct the Strings needed for the wireframes.
*
* Fortunately parsing is easy: constructor takes the JSON string and converts it
* into an Object hierarchy for us.
*/
private String[] getWeatherDataFromJson(String forecastJsonStr, int numDays)
throws JSONException {
// These are the names of the JSON objects that need to be extracted.
final String OWM_LIST = "list";
final String OWM_WEATHER = "weather";
final String OWM_TEMPERATURE = "temp";
final String OWM_MAX = "max";
final String OWM_MIN = "min";
final String OWM_DATETIME = "dt";
final String OWM_DESCRIPTION = "main";
JSONObject forecastJson = new JSONObject(forecastJsonStr);
JSONArray weatherArray = forecastJson.getJSONArray(OWM_LIST);
String[] resultStrs = new String[numDays];
for(int i = 0; i < weatherArray.length(); i++) {
// For now, using the format "Day, description, hi/low"
String day;
String description;
String highAndLow;
// Get the JSON object representing the day
JSONObject dayForecast = weatherArray.getJSONObject(i);
// The date/time is returned as a long. We need to convert that
// into something human-readable, since most people won't read "1400356800" as
// "this saturday".
long dateTime = dayForecast.getLong(OWM_DATETIME);
day = getReadableDateString(dateTime);
// description is in a child array called "weather", which is 1 element long.
JSONObject weatherObject = dayForecast.getJSONArray(OWM_WEATHER).getJSONObject(0);
description = weatherObject.getString(OWM_DESCRIPTION);
// Temperatures are in a child object called "temp". Try not to name variables
// "temp" when working with temperature. It confuses everybody.
JSONObject temperatureObject = dayForecast.getJSONObject(OWM_TEMPERATURE);
double high = temperatureObject.getDouble(OWM_MAX);
double low = temperatureObject.getDouble(OWM_MIN);
highAndLow = formatHighLows(high, low);
resultStrs[i] = day + " - " + description + " - " + highAndLow;
}
return resultStrs;
}
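/** Background task that fetches the OpenWeatherMap forecast off the UI thread and returns one display string per day. */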
public class FetchWeatherTask extends AsyncTask<String, Void, String[]> {
@Override
protected String[] doInBackground(String... zipCode) {
final String FORECAST_BASE_URL = "http://api.openweathermap.org/data/2.5/forecast/daily?";
final String QUERY = "q";
final String MODE = "mode";
String mode = "json";
final String UNITS = "units";
String units = "metric";
String COUNT = "cnt";
String count = "7";
// These two need to be declared outside the try/catch
// so that they can be closed in the finally block.
HttpURLConnection urlConnection = null;
BufferedReader reader = null;
// Will contain the raw JSON response as a string.
String forecastJsonStr = null;
try {
// Construct the URL for the OpenWeatherMap query
// Possible parameters are available at OWM's forecast API page, at
// http://openweathermap.org/API#forecast
Uri uri = Uri.parse(FORECAST_BASE_URL).buildUpon()
.appendQueryParameter(QUERY,zipCode[0])
.appendQueryParameter(MODE,mode)
.appendQueryParameter(UNITS,units)
.appendQueryParameter(COUNT,count)
.build();
URL url = new URL(uri.toString());
// Create the request to OpenWeatherMap, and open the connection
urlConnection = (HttpURLConnection) url.openConnection();
urlConnection.setRequestMethod("GET");
urlConnection.connect();
// Read the input stream into a String
InputStream inputStream = urlConnection.getInputStream();
StringBuffer buffer = new StringBuffer();
if (inputStream == null) {
// Nothing to do.
return null;
}
reader = new BufferedReader(new InputStreamReader(inputStream));
String line;
while ((line = reader.readLine()) != null) {
// Since it's JSON, adding a newline isn't necessary (it won't affect parsing)
// But it does make debugging a *lot* easier if you print out the completed
// buffer for debugging.
buffer.append(line + "\n");
}
if (buffer.length() == 0) {
// Stream was empty. No point in parsing.
return null;
}
forecastJsonStr = buffer.toString();
} catch (IOException e) {
Log.e("PlaceholderFragment", "Error ", e);
// If the code didn't successfully get the weather data, there's no point in attempting
// to parse it.
return null;
} finally{
if (urlConnection != null) {
urlConnection.disconnect();
}
if (reader != null) {
try {
reader.close();
} catch (final IOException e) {
Log.e("PlaceholderFragment", "Error closing stream", e);
}
}
}
String [] res = null;
try {
res = getWeatherDataFromJson(forecastJsonStr, Integer.parseInt(count));
} catch (JSONException e) {
Log.e("ForecastFragment", "Error", e);
}
return res;
}
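/** Runs on the UI thread once the fetch finishes and replaces the adapter contents with the new forecast strings. */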
@Override
protected void onPostExecute(String[] strings) {
super.onPostExecute(strings);
List<String> foreCastItems = Arrays.asList(strings);
adapter.clear();
adapter.addAll(foreCastItems);
}
}
}
| app/src/main/java/com/example/diokey/sunshine/app/ForecastFragment.java | package com.example.diokey.sunshine.app;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
/**
* Created by diokey on 2/6/15.
*
* A placeholder fragment containing a simple view.
*/
public class ForecastFragment extends Fragment {
ArrayAdapter<String> adapter = null;
ListView listView = null;
public ForecastFragment() {
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
this.setHasOptionsMenu(true);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_main, container, false);
List<String> foreCastItems = new ArrayList<String>();
foreCastItems.add("Today - Sunny - 88/64");
foreCastItems.add("Tomorrow - Foggy - 70/46");
foreCastItems.add("Wednesday - Sunny - 88/64");
foreCastItems.add("Thursday - Rainy - 65/46");
foreCastItems.add("Friday - Snowy - 4/-4");
foreCastItems.add("Saturday - Cloudy - 18/14");
//create an array adapter
adapter = new ArrayAdapter(getActivity(), R.layout.list_item_forecast, R.id.list_item_forecast_text_view, foreCastItems);
listView = (ListView) rootView.findViewById(R.id.list_view_forecast);
listView.setAdapter(adapter);
return rootView;
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
super.onCreateOptionsMenu(menu, inflater);
inflater.inflate(R.menu.forecastfragement,menu);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.refresh) {
FetchWeatherTask task = new FetchWeatherTask();
task.execute("94043");
return true;
}
return super.onOptionsItemSelected(item);
}
/* The date/time conversion code is going to be moved outside the asynctask later,
* so for convenience we're breaking it out into its own method now.
*/
private String getReadableDateString(long time){
// Because the API returns a unix timestamp (measured in seconds),
// it must be converted to milliseconds in order to be converted to a valid date.
Date date = new Date(time * 1000);
SimpleDateFormat format = new SimpleDateFormat("E, MMM d");
return format.format(date).toString();
}
/**
* Prepare the weather high/lows for presentation.
*/
private String formatHighLows(double high, double low) {
// For presentation, assume the user doesn't care about tenths of a degree.
long roundedHigh = Math.round(high);
long roundedLow = Math.round(low);
String highLowStr = roundedHigh + "/" + roundedLow;
return highLowStr;
}
/**
* Take the String representing the complete forecast in JSON Format and
* pull out the data we need to construct the Strings needed for the wireframes.
*
* Fortunately parsing is easy: constructor takes the JSON string and converts it
* into an Object hierarchy for us.
*/
private String[] getWeatherDataFromJson(String forecastJsonStr, int numDays)
throws JSONException {
// These are the names of the JSON objects that need to be extracted.
final String OWM_LIST = "list";
final String OWM_WEATHER = "weather";
final String OWM_TEMPERATURE = "temp";
final String OWM_MAX = "max";
final String OWM_MIN = "min";
final String OWM_DATETIME = "dt";
final String OWM_DESCRIPTION = "main";
JSONObject forecastJson = new JSONObject(forecastJsonStr);
JSONArray weatherArray = forecastJson.getJSONArray(OWM_LIST);
String[] resultStrs = new String[numDays];
for(int i = 0; i < weatherArray.length(); i++) {
// For now, using the format "Day, description, hi/low"
String day;
String description;
String highAndLow;
// Get the JSON object representing the day
JSONObject dayForecast = weatherArray.getJSONObject(i);
// The date/time is returned as a long. We need to convert that
// into something human-readable, since most people won't read "1400356800" as
// "this saturday".
long dateTime = dayForecast.getLong(OWM_DATETIME);
day = getReadableDateString(dateTime);
// description is in a child array called "weather", which is 1 element long.
JSONObject weatherObject = dayForecast.getJSONArray(OWM_WEATHER).getJSONObject(0);
description = weatherObject.getString(OWM_DESCRIPTION);
// Temperatures are in a child object called "temp". Try not to name variables
// "temp" when working with temperature. It confuses everybody.
JSONObject temperatureObject = dayForecast.getJSONObject(OWM_TEMPERATURE);
double high = temperatureObject.getDouble(OWM_MAX);
double low = temperatureObject.getDouble(OWM_MIN);
highAndLow = formatHighLows(high, low);
resultStrs[i] = day + " - " + description + " - " + highAndLow;
}
return resultStrs;
}
public class FetchWeatherTask extends AsyncTask<String, Void, String[]> {
@Override
protected String[] doInBackground(String... zipCode) {
final String FORECAST_BASE_URL = "http://api.openweathermap.org/data/2.5/forecast/daily?";
final String QUERY = "q";
final String MODE = "mode";
String mode = "json";
final String UNITS = "units";
String units = "metric";
String COUNT = "cnt";
String count = "7";
// These two need to be declared outside the try/catch
// so that they can be closed in the finally block.
HttpURLConnection urlConnection = null;
BufferedReader reader = null;
// Will contain the raw JSON response as a string.
String forecastJsonStr = null;
try {
// Construct the URL for the OpenWeatherMap query
// Possible parameters are available at OWM's forecast API page, at
// http://openweathermap.org/API#forecast
Uri uri = Uri.parse(FORECAST_BASE_URL).buildUpon()
.appendQueryParameter(QUERY,zipCode[0])
.appendQueryParameter(MODE,mode)
.appendQueryParameter(UNITS,units)
.appendQueryParameter(COUNT,count)
.build();
URL url = new URL(uri.toString());
// Create the request to OpenWeatherMap, and open the connection
urlConnection = (HttpURLConnection) url.openConnection();
urlConnection.setRequestMethod("GET");
urlConnection.connect();
// Read the input stream into a String
InputStream inputStream = urlConnection.getInputStream();
StringBuffer buffer = new StringBuffer();
if (inputStream == null) {
// Nothing to do.
return null;
}
reader = new BufferedReader(new InputStreamReader(inputStream));
String line;
while ((line = reader.readLine()) != null) {
// Since it's JSON, adding a newline isn't necessary (it won't affect parsing)
// But it does make debugging a *lot* easier if you print out the completed
// buffer for debugging.
buffer.append(line + "\n");
}
if (buffer.length() == 0) {
// Stream was empty. No point in parsing.
return null;
}
forecastJsonStr = buffer.toString();
} catch (IOException e) {
Log.e("PlaceholderFragment", "Error ", e);
// If the code didn't successfully get the weather data, there's no point in attempting
// to parse it.
return null;
} finally{
if (urlConnection != null) {
urlConnection.disconnect();
}
if (reader != null) {
try {
reader.close();
} catch (final IOException e) {
Log.e("PlaceholderFragment", "Error closing stream", e);
}
}
}
String [] res = null;
try {
res = getWeatherDataFromJson(forecastJsonStr, Integer.parseInt(count));
} catch (JSONException e) {
Log.e("ForecastFragment", "Error", e);
}
return res;
}
@Override
protected void onPostExecute(String[] strings) {
super.onPostExecute(strings);
List<String> foreCastItems = Arrays.asList(strings);
adapter = new ArrayAdapter(getActivity(), R.layout.list_item_forecast, R.id.list_item_forecast_text_view, foreCastItems);
listView.setAdapter(adapter);
}
}
}
| Updated the adapter
| app/src/main/java/com/example/diokey/sunshine/app/ForecastFragment.java | Updated the adapter | <ide><path>pp/src/main/java/com/example/diokey/sunshine/app/ForecastFragment.java
<ide> protected void onPostExecute(String[] strings) {
<ide> super.onPostExecute(strings);
<ide> List<String> foreCastItems = Arrays.asList(strings);
<del> adapter = new ArrayAdapter(getActivity(), R.layout.list_item_forecast, R.id.list_item_forecast_text_view, foreCastItems);
<del> listView.setAdapter(adapter);
<add> adapter.clear();
<add> adapter.addAll(foreCastItems);
<ide> }
<ide> }
<ide> } |
|
JavaScript | bsd-3-clause | 8f4dc3ce1ab322dc41e0b0e9e4a172b42942d72d | 0 | redaktor/deliteful | dojo.provide("dojox.dtl._DomTemplated");
dojo.require("dijit._Templated");
dojo.require("dojox.dtl.dom");
dojo.require("dojox.dtl.render.dom");
dojo.require("dojox.dtl.contrib.dijit");
dojox.dtl._DomTemplated = function(){};
dojox.dtl._DomTemplated.prototype = {
_dijitTemplateCompat: false,
buildRendering: function(){
// summary:
// Construct the UI for this widget, setting this.domNode.
//render needs a domNode to work with
this.domNode = this.srcNodeRef;
if(!this._render){
var ddcd = dojox.dtl.contrib.dijit;
var old = ddcd.widgetsInTemplate;
ddcd.widgetsInTemplate = this.widgetsInTemplate;
this.template = this.template || this._getCachedTemplate(this.templatePath, this.templateString);
this._render = new dojox.dtl.render.dom.Render(this.domNode, this.template);
ddcd.widgetsInTemplate = old;
}
this.render();
this.domNode = this.template.getRootNode();
if(this.srcNodeRef && this.srcNodeRef.parentNode){
dojo.destroy(this.srcNodeRef);
delete this.srcNodeRef;
}
},
setTemplate: function(/*String|dojo._Url*/ template, /*dojox.dtl.Context?*/ context){
// summary:
// Quickly switch between templated by location
if(dojox.dtl.text._isTemplate(template)){
this.template = this._getCachedTemplate(null, template);
}else{
this.template = this._getCachedTemplate(template);
}
this.render(context);
},
render: function(/*dojox.dtl.Context?*/ context, /*dojox.dtl.DomTemplate?*/ tpl){
if(tpl){
this.template = tpl;
}
this._render.render(this._getContext(context), this.template);
},
_getContext: function(context){
if (!(context instanceof dojox.dtl.Context)) {
context = false;
}
context = context || new dojox.dtl.Context(this);
context.setThis(this);
return context;
},
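// Compile each template only once per widget instance; results are cached by template string or path.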
_getCachedTemplate: function(templatePath, templateString){
if(!this._templates){
this._templates = {};
}
var key = templateString || templatePath.toString();
var tmplts = this._templates;
if(tmplts[key]){
return tmplts[key];
}
return (tmplts[key] = new dojox.dtl.DomTemplate(
dijit._Templated.getCachedTemplate(
templatePath,
templateString,
true
)
));
}
};
| dtl/_DomTemplated.js | dojo.provide("dojox.dtl._DomTemplated");
dojo.require("dijit._Templated");
dojo.require("dojox.dtl.dom");
dojo.require("dojox.dtl.render.dom");
dojo.require("dojox.dtl.contrib.dijit");
dojox.dtl._DomTemplated = {
prototype: {
_dijitTemplateCompat: false,
buildRendering: function(){
// summary:
// Construct the UI for this widget, setting this.domNode.
//render needs a domNode to work with
this.domNode = this.srcNodeRef;
if(!this._render){
var ddcd = dojox.dtl.contrib.dijit;
var old = ddcd.widgetsInTemplate;
ddcd.widgetsInTemplate = this.widgetsInTemplate;
this.template = this.template || this._getCachedTemplate(this.templatePath, this.templateString);
this._render = new dojox.dtl.render.dom.Render(this.domNode, this.template);
ddcd.widgetsInTemplate = old;
}
this.render();
this.domNode = this.template.getRootNode();
if(this.srcNodeRef && this.srcNodeRef.parentNode){
dojo.destroy(this.srcNodeRef);
delete this.srcNodeRef;
}
},
setTemplate: function(/*String|dojo._Url*/ template, /*dojox.dtl.Context?*/ context){
// summary:
// Quickly switch between templated by location
if(dojox.dtl.text._isTemplate(template)){
this.template = this._getCachedTemplate(null, template);
}else{
this.template = this._getCachedTemplate(template);
}
this.render(context);
},
render: function(/*dojox.dtl.Context?*/ context, /*dojox.dtl.DomTemplate?*/ tpl){
if(tpl){
this.template = tpl;
}
this._render.render(this._getContext(context), this.template);
},
_getContext: function(context){
if (!(context instanceof dojox.dtl.Context)) {
context = false;
}
context = context || new dojox.dtl.Context(this);
context.setThis(this);
return context;
},
_getCachedTemplate: function(templatePath, templateString){
if(!this._templates){
this._templates = {};
}
var key = templateString || templatePath.toString();
var tmplts = this._templates;
if(tmplts[key]){
return tmplts[key];
}
return (tmplts[key] = new dojox.dtl.DomTemplate(
dijit._Templated.getCachedTemplate(
templatePath,
templateString,
true
)
));
}
}
}; | dtl: minor fix --- "classes" should be functions, not objects, !strict, refs #9862.
git-svn-id: ee6e786acc44c9cca5ee17c71a5576c45c210361@20154 560b804f-0ae3-0310-86f3-f6aa0a117693
| dtl/_DomTemplated.js | dtl: minor fix --- "classes" should be functions, not objects, !strict, refs #9862. | <ide><path>tl/_DomTemplated.js
<ide> dojo.require("dojox.dtl.render.dom");
<ide> dojo.require("dojox.dtl.contrib.dijit");
<ide>
<del>dojox.dtl._DomTemplated = {
<del> prototype: {
<del> _dijitTemplateCompat: false,
<del> buildRendering: function(){
<del> // summary:
<del> // Construct the UI for this widget, setting this.domNode.
<add>dojox.dtl._DomTemplated = function(){};
<add>dojox.dtl._DomTemplated.prototype = {
<add> _dijitTemplateCompat: false,
<add> buildRendering: function(){
<add> // summary:
<add> // Construct the UI for this widget, setting this.domNode.
<ide>
<del> //render needs a domNode to work with
<del> this.domNode = this.srcNodeRef;
<add> //render needs a domNode to work with
<add> this.domNode = this.srcNodeRef;
<ide>
<del> if(!this._render){
<del> var ddcd = dojox.dtl.contrib.dijit;
<del> var old = ddcd.widgetsInTemplate;
<del> ddcd.widgetsInTemplate = this.widgetsInTemplate;
<del> this.template = this.template || this._getCachedTemplate(this.templatePath, this.templateString);
<del> this._render = new dojox.dtl.render.dom.Render(this.domNode, this.template);
<del> ddcd.widgetsInTemplate = old;
<del> }
<add> if(!this._render){
<add> var ddcd = dojox.dtl.contrib.dijit;
<add> var old = ddcd.widgetsInTemplate;
<add> ddcd.widgetsInTemplate = this.widgetsInTemplate;
<add> this.template = this.template || this._getCachedTemplate(this.templatePath, this.templateString);
<add> this._render = new dojox.dtl.render.dom.Render(this.domNode, this.template);
<add> ddcd.widgetsInTemplate = old;
<add> }
<ide>
<del> this.render();
<add> this.render();
<ide>
<del> this.domNode = this.template.getRootNode();
<del> if(this.srcNodeRef && this.srcNodeRef.parentNode){
<del> dojo.destroy(this.srcNodeRef);
<del> delete this.srcNodeRef;
<del> }
<del> },
<del> setTemplate: function(/*String|dojo._Url*/ template, /*dojox.dtl.Context?*/ context){
<del> // summary:
<del> // Quickly switch between templated by location
<del> if(dojox.dtl.text._isTemplate(template)){
<del> this.template = this._getCachedTemplate(null, template);
<del> }else{
<del> this.template = this._getCachedTemplate(template);
<del> }
<del> this.render(context);
<del> },
<del> render: function(/*dojox.dtl.Context?*/ context, /*dojox.dtl.DomTemplate?*/ tpl){
<del> if(tpl){
<del> this.template = tpl;
<del> }
<del> this._render.render(this._getContext(context), this.template);
<del> },
<del> _getContext: function(context){
<del> if (!(context instanceof dojox.dtl.Context)) {
<del> context = false;
<del> }
<del> context = context || new dojox.dtl.Context(this);
<del> context.setThis(this);
<del> return context;
<del> },
<del> _getCachedTemplate: function(templatePath, templateString){
<del> if(!this._templates){
<del> this._templates = {};
<del> }
<del> var key = templateString || templatePath.toString();
<del> var tmplts = this._templates;
<del> if(tmplts[key]){
<del> return tmplts[key];
<del> }
<del> return (tmplts[key] = new dojox.dtl.DomTemplate(
<del> dijit._Templated.getCachedTemplate(
<del> templatePath,
<del> templateString,
<del> true
<del> )
<del> ));
<add> this.domNode = this.template.getRootNode();
<add> if(this.srcNodeRef && this.srcNodeRef.parentNode){
<add> dojo.destroy(this.srcNodeRef);
<add> delete this.srcNodeRef;
<ide> }
<add> },
<add> setTemplate: function(/*String|dojo._Url*/ template, /*dojox.dtl.Context?*/ context){
<add> // summary:
<add> // Quickly switch between templated by location
<add> if(dojox.dtl.text._isTemplate(template)){
<add> this.template = this._getCachedTemplate(null, template);
<add> }else{
<add> this.template = this._getCachedTemplate(template);
<add> }
<add> this.render(context);
<add> },
<add> render: function(/*dojox.dtl.Context?*/ context, /*dojox.dtl.DomTemplate?*/ tpl){
<add> if(tpl){
<add> this.template = tpl;
<add> }
<add> this._render.render(this._getContext(context), this.template);
<add> },
<add> _getContext: function(context){
<add> if (!(context instanceof dojox.dtl.Context)) {
<add> context = false;
<add> }
<add> context = context || new dojox.dtl.Context(this);
<add> context.setThis(this);
<add> return context;
<add> },
<add> _getCachedTemplate: function(templatePath, templateString){
<add> if(!this._templates){
<add> this._templates = {};
<add> }
<add> var key = templateString || templatePath.toString();
<add> var tmplts = this._templates;
<add> if(tmplts[key]){
<add> return tmplts[key];
<add> }
<add> return (tmplts[key] = new dojox.dtl.DomTemplate(
<add> dijit._Templated.getCachedTemplate(
<add> templatePath,
<add> templateString,
<add> true
<add> )
<add> ));
<ide> }
<ide> }; |
|
Java | apache-2.0 | f2ed5e14f74cb0a79f8f010ea4635509bd092fc8 | 0 | emergentdotorg/shaman,emergentdotorg/shaman | package org.emergent.android.weave;
import android.app.*;
import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.os.AsyncTask;
import android.os.Bundle;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.Log;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.widget.*;
import org.emergent.android.weave.client.WeaveAccountInfo;
import org.emergent.android.weave.persistence.Bookmarks;
import org.emergent.android.weave.persistence.Passwords;
import org.emergent.android.weave.persistence.Weaves;
import org.emergent.android.weave.syncadapter.LoginActivity;
import org.emergent.android.weave.syncadapter.SyncAssistant;
import org.emergent.android.weave.util.Dbg;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
/**
* @author Patrick Woodworth
*/
public abstract class AbstractListActivity extends ListActivity {
private static final String TAG = Dbg.getTag(AbstractListActivity.class);
private static final int EDIT_ACCOUNT_LOGIN_REQUEST = 1000;
protected EditText m_filterEdit = null;
protected SimpleCursorAdapter m_adapter = null;
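// Holds the sync task currently in flight, if any, so only one sync can run at a time.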
private static final AtomicReference<AsyncTask<WeaveAccountInfo, Integer, Throwable>> sm_syncThread =
new AtomicReference<AsyncTask<WeaveAccountInfo, Integer, Throwable>>();
protected TextWatcher m_filterEditWatcher = new TextWatcher() {
public void afterTextChanged(Editable s) {
}
public void beforeTextChanged(CharSequence s, int start, int count, int after) {
}
public void onTextChanged(CharSequence s, int start, int before, int count) {
if (m_adapter != null)
m_adapter.getFilter().filter(s);
}
};
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
}
@Override
protected void onResume() {
super.onResume();
}
@Override
protected void onPause() {
super.onPause();
}
@Override
protected void onDestroy() {
if (m_filterEdit != null) {
m_filterEdit.removeTextChangedListener(m_filterEditWatcher);
}
super.onDestroy();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.main_menu, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.account:
launchAccountEditor();
return true;
case R.id.reset:
wipeData();
return true;
case R.id.resync:
requestSync();
return true;
case R.id.settings:
launchPreferencesEditor();
return true;
default:
return super.onOptionsItemSelected(item);
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == EDIT_ACCOUNT_LOGIN_REQUEST && resultCode == RESULT_OK) {
SharedPreferences appPrefs = DobbyUtil.getApplicationPreferences(this);
SharedPreferences.Editor editor = appPrefs.edit();
DobbyUtil.intentToLoginPrefs(editor, data);
boolean updateSaved = editor.commit();
String msg = String.format("updateSaved : '%s'", updateSaved);
Toast.makeText(this, msg, Toast.LENGTH_LONG).show();
requestSync();
}
if (requestCode == EDIT_ACCOUNT_LOGIN_REQUEST && resultCode != RESULT_OK) {
Toast.makeText(this, "update cancelled", Toast.LENGTH_LONG).show();
}
}
protected void launchPreferencesEditor() {
Intent intent = new Intent();
intent.setClass(this, ApplicationOptionsActivity.class);
intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
startActivity(intent);
}
protected void launchAccountEditor() {
Intent intent = new Intent();
intent.setClass(this, LoginActivity.class);
DobbyUtil.loginPrefsToIntent(DobbyUtil.getApplicationPreferences(this), intent);
startActivityForResult(intent, EDIT_ACCOUNT_LOGIN_REQUEST);
}
private void requestSync() {
requestSync(this);
}
static void requestSync(final Context context) {
WeaveAccountInfo loginInfo = null;
try {
Intent intent = new Intent();
DobbyUtil.loginPrefsToIntent(DobbyUtil.getApplicationPreferences(context), intent);
loginInfo = DobbyUtil.intentToLogin(intent);
} catch (Exception e) {
Log.d(TAG, e.getMessage(), e);
}
if (loginInfo == null)
return;
Toast.makeText(context, "starting sync", Toast.LENGTH_LONG).show();
AsyncTask<WeaveAccountInfo, Integer, Throwable> aTask = new AsyncTask<WeaveAccountInfo, Integer, Throwable>() {
@Override
protected Throwable doInBackground(WeaveAccountInfo... accountInfos) {
WeaveAccountInfo accountInfo = accountInfos[0];
try {
final Set<SyncAssistant> syncAssistants = new HashSet<SyncAssistant>(Arrays.asList(
new SyncAssistant(context, Bookmarks.UPDATER),
new SyncAssistant(context, Passwords.UPDATER)
));
for (SyncAssistant syncAssistant : syncAssistants) {
syncAssistant.doQueryAndUpdate(accountInfo.toAuthToken());
}
} catch (Throwable e) {
Log.e(TAG, e.getMessage(), e);
return e;
}
return null;
}
@Override
protected void onProgressUpdate(Integer... values) {
}
@Override
protected void onPostExecute(Throwable e) {
sm_syncThread.compareAndSet(this, null);
if (e == null)
return;
String msg = String.format("sync failed : '%s'", e.getMessage());
Toast.makeText(context, msg, Toast.LENGTH_LONG).show();
}
};
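// Start the task only if no other sync is already registered.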
boolean cmpSetRetval = sm_syncThread.compareAndSet(null, aTask);
if (cmpSetRetval)
aTask.execute(loginInfo);
}
// private static final int HELLO_ID = 1;
//
// static void setNewMessageIndicator(Context context, int messageCount){
//
// String ns = Context.NOTIFICATION_SERVICE;
// NotificationManager mNotificationManager = (NotificationManager) context.getSystemService(ns);
//
// int icon = R.drawable.stat_sys_warning;
// CharSequence tickerText = "Hello";
// long when = System.currentTimeMillis();
//
// Notification notification = new Notification(icon, tickerText, when);
//
//// Context context = getApplicationContext();
// CharSequence contentTitle = "My notification";
// CharSequence contentText = "Hello World!";
// Intent notificationIntent = new Intent(this, AbstractListActivity.class);
// PendingIntent contentIntent = PendingIntent.getActivity(context, 0, notificationIntent, 0);
//
// notification.setLatestEventInfo(context, contentTitle, contentText, contentIntent);
//
// mNotificationManager.notify(HELLO_ID, notification);
//
//// // If we're being called because a new message has been received,
//// // then display an icon and a count. Otherwise, delete the persistent
//// // message.
//// if (messageCount > 0) {
//// nm.notifyWithText(myApp.NOTIFICATION_GUID, // ID for this notification.
//// messageCount + " new message" + messageCount > 1 ? "s":"", // Text to display.
//// NotificationManager.LENGTH_SHORT); // Show it for a short time only.
// }
private void wipeData() {
Log.w(TAG, "wipeData");
ContentResolver resolver = getContentResolver();
Passwords.UPDATER.deleteRecords(resolver);
Bookmarks.UPDATER.deleteRecords(resolver);
}
protected void showMyDialog(String title, String msg) {
AlertDialog.Builder adb = new AlertDialog.Builder(this);
adb.setTitle(title);
// Log.v(TAG, msg);
adb.setMessage(msg);
adb.setPositiveButton("Ok", null);
adb.show();
}
protected void setupAdapter() {
ContentResolver cr = getContentResolver();
// Get the list view
// final ListView listView = (ListView)findViewById(R.id.listView);
// final ListView listView = this.getListView();
FilterQueryProvider qfp = getQueryFilterProvider(cr);
m_adapter = createCursorAdapter(qfp);
m_adapter.setFilterQueryProvider(qfp);
// listView.setAdapter(adapter);
setListAdapter(m_adapter);
m_filterEdit = (EditText) findViewById(R.id.search_box);
m_filterEdit.addTextChangedListener(m_filterEditWatcher);
}
protected abstract SimpleCursorAdapter createCursorAdapter(FilterQueryProvider qfp);
protected abstract FilterQueryProvider getQueryFilterProvider(ContentResolver cr);
protected class MyViewBinder implements SimpleCursorAdapter.ViewBinder {
private final SimpleDateFormat m_dateFormat = new SimpleDateFormat();
@Override
public boolean setViewValue(View view, Cursor cursor, int columnIndex) {
if (view instanceof TextView &&
Weaves.Columns.LAST_MODIFIED.equals(cursor.getColumnName(columnIndex))) {
TextView tview = (TextView) view;
long theDateLong = cursor.getLong(columnIndex);
Date theDate = new Date(theDateLong * 1000);
// GregorianCalendar sm_gregorianCalendar = new GregorianCalendar();
// sm_gregorianCalendar.setTimeInMillis(theDateLong);
String dateStr = m_dateFormat.format(theDate);
// String dateStr = "" + theDate.toString();
// String dateStr = "" + theDateLong;
tview.setText(dateStr);
return true;
}
return false;
}
}
protected class MyCursorAdapter extends SimpleCursorAdapter {
private Context context;
private int layout;
public MyCursorAdapter(Context context, int layout, Cursor c, String[] from, int[] to) {
super(context, layout, c, from, to);
this.context = context;
this.layout = layout;
this.setViewBinder(new MyViewBinder());
}
@Override
public int getStringConversionColumn() {
return super.getStringConversionColumn();
}
@Override
public void setStringConversionColumn(int stringConversionColumn) {
super.setStringConversionColumn(stringConversionColumn);
}
@Override
public Object getItem(int position) {
return super.getItem(position);
}
}
}
| src/org/emergent/android/weave/AbstractListActivity.java | package org.emergent.android.weave;
import android.app.AlertDialog;
import android.app.ListActivity;
import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.os.AsyncTask;
import android.os.Bundle;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.Log;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.widget.*;
import org.emergent.android.weave.client.WeaveAccountInfo;
import org.emergent.android.weave.persistence.Bookmarks;
import org.emergent.android.weave.persistence.Passwords;
import org.emergent.android.weave.persistence.Weaves;
import org.emergent.android.weave.syncadapter.LoginActivity;
import org.emergent.android.weave.syncadapter.SyncAssistant;
import org.emergent.android.weave.util.Dbg;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
/**
* @author Patrick Woodworth
*/
public abstract class AbstractListActivity extends ListActivity {
private static final String TAG = Dbg.getTag(AbstractListActivity.class);
private static final int EDIT_ACCOUNT_LOGIN_REQUEST = 1000;
protected EditText m_filterEdit = null;
protected SimpleCursorAdapter m_adapter = null;
private static final AtomicReference<AsyncTask<WeaveAccountInfo, Integer, Throwable>> sm_syncThread =
new AtomicReference<AsyncTask<WeaveAccountInfo, Integer, Throwable>>();
protected TextWatcher m_filterEditWatcher = new TextWatcher() {
public void afterTextChanged(Editable s) {
}
public void beforeTextChanged(CharSequence s, int start, int count, int after) {
}
public void onTextChanged(CharSequence s, int start, int before, int count) {
if (m_adapter != null)
m_adapter.getFilter().filter(s);
}
};
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
}
@Override
protected void onResume() {
super.onResume();
}
@Override
protected void onPause() {
super.onPause();
}
@Override
protected void onDestroy() {
if (m_filterEdit != null) {
m_filterEdit.removeTextChangedListener(m_filterEditWatcher);
}
super.onDestroy();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.main_menu, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.account:
launchAccountEditor();
return true;
case R.id.reset:
wipeData();
return true;
case R.id.resync:
requestSync();
return true;
case R.id.settings:
launchPreferencesEditor();
return true;
default:
return super.onOptionsItemSelected(item);
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == EDIT_ACCOUNT_LOGIN_REQUEST && resultCode == RESULT_OK) {
SharedPreferences appPrefs = DobbyUtil.getApplicationPreferences(this);
SharedPreferences.Editor editor = appPrefs.edit();
DobbyUtil.intentToLoginPrefs(editor, data);
boolean updateSaved = editor.commit();
String msg = String.format("updateSaved : '%s'", updateSaved);
Toast.makeText(this, msg, Toast.LENGTH_LONG).show();
requestSync();
}
if (requestCode == EDIT_ACCOUNT_LOGIN_REQUEST && resultCode != RESULT_OK) {
Toast.makeText(this, "update cancelled", Toast.LENGTH_LONG).show();
}
}
protected void launchPreferencesEditor() {
Intent intent = new Intent();
intent.setClass(this, ApplicationOptionsActivity.class);
intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
startActivity(intent);
}
protected void launchAccountEditor() {
Intent intent = new Intent();
intent.setClass(this, LoginActivity.class);
DobbyUtil.loginPrefsToIntent(DobbyUtil.getApplicationPreferences(this), intent);
startActivityForResult(intent, EDIT_ACCOUNT_LOGIN_REQUEST);
}
private void requestSync() {
requestSync(this);
}
static void requestSync(final Context context) {
WeaveAccountInfo loginInfo = null;
try {
Intent intent = new Intent();
DobbyUtil.loginPrefsToIntent(DobbyUtil.getApplicationPreferences(context), intent);
loginInfo = DobbyUtil.intentToLogin(intent);
} catch (Exception e) {
Log.d(TAG, e.getMessage(), e);
}
if (loginInfo == null)
return;
Toast.makeText(context, "starting sync", Toast.LENGTH_LONG).show();
AsyncTask<WeaveAccountInfo, Integer, Throwable> aTask = new AsyncTask<WeaveAccountInfo, Integer, Throwable>() {
@Override
protected Throwable doInBackground(WeaveAccountInfo... accountInfos) {
WeaveAccountInfo accountInfo = accountInfos[0];
try {
final Set<SyncAssistant> syncAssistants = new HashSet<SyncAssistant>(Arrays.asList(
new SyncAssistant(context, Bookmarks.UPDATER),
new SyncAssistant(context, Passwords.UPDATER)
));
for (SyncAssistant syncAssistant : syncAssistants) {
syncAssistant.doQueryAndUpdate(accountInfo.toAuthToken());
}
} catch (Throwable e) {
Log.e(TAG, e.getMessage(), e);
return e;
}
return null;
}
@Override
protected void onProgressUpdate(Integer... values) {
}
@Override
protected void onPostExecute(Throwable e) {
sm_syncThread.compareAndSet(this, null);
if (e == null)
return;
String msg = String.format("sync failed : '%s'", e.getMessage());
Toast.makeText(context, msg, Toast.LENGTH_LONG).show();
}
};
boolean cmpSetRetval = sm_syncThread.compareAndSet(null, aTask);
if (cmpSetRetval)
aTask.execute(loginInfo);
}
private void wipeData() {
Log.w(TAG, "wipeData");
ContentResolver resolver = getContentResolver();
Passwords.UPDATER.deleteRecords(resolver);
Bookmarks.UPDATER.deleteRecords(resolver);
}
protected void showMyDialog(String title, String msg) {
AlertDialog.Builder adb = new AlertDialog.Builder(this);
adb.setTitle(title);
// Log.v(TAG, msg);
adb.setMessage(msg);
adb.setPositiveButton("Ok", null);
adb.show();
}
protected void setupAdapter() {
ContentResolver cr = getContentResolver();
// Get the list view
// final ListView listView = (ListView)findViewById(R.id.listView);
// final ListView listView = this.getListView();
FilterQueryProvider qfp = getQueryFilterProvider(cr);
m_adapter = createCursorAdapter(qfp);
m_adapter.setFilterQueryProvider(qfp);
// listView.setAdapter(adapter);
setListAdapter(m_adapter);
m_filterEdit = (EditText) findViewById(R.id.search_box);
m_filterEdit.addTextChangedListener(m_filterEditWatcher);
}
protected abstract SimpleCursorAdapter createCursorAdapter(FilterQueryProvider qfp);
protected abstract FilterQueryProvider getQueryFilterProvider(ContentResolver cr);
protected class MyViewBinder implements SimpleCursorAdapter.ViewBinder {
private final SimpleDateFormat m_dateFormat = new SimpleDateFormat();
@Override
public boolean setViewValue(View view, Cursor cursor, int columnIndex) {
if (view instanceof TextView &&
Weaves.Columns.LAST_MODIFIED.equals(cursor.getColumnName(columnIndex))) {
TextView tview = (TextView) view;
long theDateLong = cursor.getLong(columnIndex);
Date theDate = new Date(theDateLong * 1000);
// GregorianCalendar sm_gregorianCalendar = new GregorianCalendar();
// sm_gregorianCalendar.setTimeInMillis(theDateLong);
String dateStr = m_dateFormat.format(theDate);
// String dateStr = "" + theDate.toString();
// String dateStr = "" + theDateLong;
tview.setText(dateStr);
return true;
}
return false;
}
}
protected class MyCursorAdapter extends SimpleCursorAdapter {
private Context context;
private int layout;
public MyCursorAdapter(Context context, int layout, Cursor c, String[] from, int[] to) {
super(context, layout, c, from, to);
this.context = context;
this.layout = layout;
this.setViewBinder(new MyViewBinder());
}
@Override
public int getStringConversionColumn() {
return super.getStringConversionColumn();
}
@Override
public void setStringConversionColumn(int stringConversionColumn) {
super.setStringConversionColumn(stringConversionColumn);
}
@Override
public Object getItem(int position) {
return super.getItem(position);
}
}
}
| Adding status bar stuff.
| src/org/emergent/android/weave/AbstractListActivity.java | Adding status bar stuff. | <ide><path>rc/org/emergent/android/weave/AbstractListActivity.java
<ide> package org.emergent.android.weave;
<ide>
<del>import android.app.AlertDialog;
<del>import android.app.ListActivity;
<add>import android.app.*;
<ide> import android.content.ContentResolver;
<ide> import android.content.Context;
<ide> import android.content.Intent;
<ide> aTask.execute(loginInfo);
<ide> }
<ide>
<add>// private static final int HELLO_ID = 1;
<add>//
<add>// static void setNewMessageIndicator(Context context, int messageCount){
<add>//
<add>// String ns = Context.NOTIFICATION_SERVICE;
<add>// NotificationManager mNotificationManager = (NotificationManager) context.getSystemService(ns);
<add>//
<add>// int icon = R.drawable.stat_sys_warning;
<add>// CharSequence tickerText = "Hello";
<add>// long when = System.currentTimeMillis();
<add>//
<add>// Notification notification = new Notification(icon, tickerText, when);
<add>//
<add>//// Context context = getApplicationContext();
<add>// CharSequence contentTitle = "My notification";
<add>// CharSequence contentText = "Hello World!";
<add>// Intent notificationIntent = new Intent(this, AbstractListActivity.class);
<add>// PendingIntent contentIntent = PendingIntent.getActivity(context, 0, notificationIntent, 0);
<add>//
<add>// notification.setLatestEventInfo(context, contentTitle, contentText, contentIntent);
<add>//
<add>// mNotificationManager.notify(HELLO_ID, notification);
<add>//
<add>//// // If we're being called because a new message has been received,
<add>//// // then display an icon and a count. Otherwise, delete the persistent
<add>//// // message.
<add>//// if (messageCount > 0) {
<add>//// nm.notifyWithText(myApp.NOTIFICATION_GUID, // ID for this notification.
<add>//// messageCount + " new message" + messageCount > 1 ? "s":"", // Text to display.
<add>//// NotificationManager.LENGTH_SHORT); // Show it for a short time only.
<add>// }
<add>
<add>
<ide> private void wipeData() {
<ide> Log.w(TAG, "wipeData");
<ide> ContentResolver resolver = getContentResolver(); |
|
Java | agpl-3.0 | bef973074121ad304fa2b027beec0d8832aa96fb | 0 | duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test | 1356fc3c-2e61-11e5-9284-b827eb9e62be | hello.java | 135197ce-2e61-11e5-9284-b827eb9e62be | 1356fc3c-2e61-11e5-9284-b827eb9e62be | hello.java | 1356fc3c-2e61-11e5-9284-b827eb9e62be | <ide><path>ello.java
<del>135197ce-2e61-11e5-9284-b827eb9e62be
<add>1356fc3c-2e61-11e5-9284-b827eb9e62be |
|
Java | apache-2.0 | 7d3c0fda01949e66958c10a0175b3837a6981303 | 0 | Team-OctOS/host_gerrit,dwhipstock/gerrit,renchaorevee/gerrit,1yvT0s/gerrit,midnightradio/gerrit,zommarin/gerrit,TonyChai24/test,1yvT0s/gerrit,WANdisco/gerrit,cjh1/gerrit,Saulis/gerrit,thinkernel/gerrit,jackminicloud/test,quyixia/gerrit,anminhsu/gerrit,gracefullife/gerrit,catrope/gerrit,evanchueng/gerrit,jeblair/gerrit,Overruler/gerrit,anminhsu/gerrit,joshuawilson/merrit,Team-OctOS/host_gerrit,TonyChai24/test,cjh1/gerrit,bootstraponline-archive/gerrit-mirror,MerritCR/merrit,MerritCR/merrit,gcoders/gerrit,atdt/gerrit,Saulis/gerrit,zommarin/gerrit,netroby/gerrit,Team-OctOS/host_gerrit,hdost/gerrit,Team-OctOS/host_gerrit,Team-OctOS/host_gerrit,catrope/gerrit,hdost/gerrit,joshuawilson/merrit,renchaorevee/gerrit,thesamet/gerrit,Seinlin/gerrit,Distrotech/gerrit,ashang/aaron-gerrit,Seinlin/gerrit,netroby/gerrit,thesamet/gerrit,thesamet/gerrit,sudosurootdev/gerrit,m1kah/gerrit-contributions,quyixia/gerrit,zommarin/gerrit,qtproject/qtqa-gerrit,teamblueridge/gerrit,zommarin/gerrit,teamblueridge/gerrit,atdt/gerrit,Distrotech/gerrit,bpollack/gerrit,GerritCodeReview/gerrit,atdt/gerrit,bpollack/gerrit,skurfuerst/gerrit,Seinlin/gerrit,gerrit-review/gerrit,Overruler/gerrit,Overruler/gerrit,keerath/gerrit_newssh,GerritCodeReview/gerrit,quyixia/gerrit,evanchueng/gerrit,catrope/gerrit,duboisf/gerrit,evanchueng/gerrit,supriyantomaftuh/gerrit,netroby/gerrit,GerritCodeReview/gerrit,TonyChai24/test,WANdisco/gerrit,1yvT0s/gerrit,Distrotech/gerrit,GerritCodeReview/gerrit-attic,hdost/gerrit,supriyantomaftuh/gerrit,midnightradio/gerrit,atdt/gerrit,qtproject/qtqa-gerrit,Saulis/gerrit,pkdevbox/gerrit,hdost/gerrit,Team-OctOS/host_gerrit,Overruler/gerrit,jackminicloud/test,bootstraponline-archive/gerrit-mirror,m1kah/gerrit-contributions,CandyShop/gerrit,sudosurootdev/gerrit,cjh1/gerrit,gcoders/gerrit,renchaorevee/gerrit,Seinlin/gerrit,dwhipstock/gerrit,MerritCR/merrit,keerath/gerrit_newssh,thinkernel/gerrit,cjh1/gerrit,joshuawilson/merrit,joshuawilson/merrit,midnightradio/gerrit,dwhipstock/gerrit,qtproject/qtqa-gerrit,bootstraponline-archive/gerrit-mirror,WANdisco/gerrit,Saulis/gerrit,skurfuerst/gerrit,WANdisco/gerrit,thesamet/gerrit,MerritCR/merrit,m1kah/gerrit-contributions,atdt/gerrit,catrope/gerrit,teamblueridge/gerrit,ckamm/gerrit,thinkernel/gerrit,Distrotech/gerrit,bpollack/gerrit,bpollack/gerrit,Distrotech/gerrit,pkdevbox/gerrit,duboisf/gerrit,jeblair/gerrit,hdost/gerrit,WANdisco/gerrit,MerritCR/merrit,makholm/gerrit-ceremony,midnightradio/gerrit,austinchic/Gerrit,Overruler/gerrit,renchaorevee/gerrit,jeblair/gerrit,supriyantomaftuh/gerrit,anminhsu/gerrit,jackminicloud/test,dwhipstock/gerrit,ashang/aaron-gerrit,ashang/aaron-gerrit,gcoders/gerrit,evanchueng/gerrit,anminhsu/gerrit,rtyley/mini-git-server,ckamm/gerrit,skurfuerst/gerrit,midnightradio/gerrit,ashang/aaron-gerrit,TonyChai24/test,makholm/gerrit-ceremony,ckamm/gerrit,gcoders/gerrit,skurfuerst/gerrit,basilgor/gerrit,makholm/gerrit-ceremony,hdost/gerrit,pkdevbox/gerrit,duboisf/gerrit,anminhsu/gerrit,dwhipstock/gerrit,sudosurootdev/gerrit,bootstraponline-archive/gerrit-mirror,teamblueridge/gerrit,zommarin/gerrit,netroby/gerrit,gerrit-review/gerrit,ashang/aaron-gerrit,CandyShop/gerrit,duboisf/gerrit,makholm/gerrit-ceremony,pkdevbox/gerrit,WANdisco/gerrit,WANdisco/gerrit,austinchic/Gerrit,bpollack/gerrit,gcoders/gerrit,gracefullife/gerrit,bpollack/gerrit,1yvT0s/gerrit,qtproject/qtqa-gerrit,bootstraponline-archive/gerrit-mirror,bootstraponline-archive/gerrit-mirror,TonyChai24/test,dwhipstock/gerrit,
gerrit-review/gerrit,joshuawilson/merrit,basilgor/gerrit,ckamm/gerrit,Saulis/gerrit,Saulis/gerrit,dwhipstock/gerrit,gcoders/gerrit,austinchic/Gerrit,keerath/gerrit_newssh,Distrotech/gerrit,MerritCR/merrit,pkdevbox/gerrit,supriyantomaftuh/gerrit,hdost/gerrit,renchaorevee/gerrit,anminhsu/gerrit,thesamet/gerrit,thesamet/gerrit,netroby/gerrit,jackminicloud/test,keerath/gerrit_newssh,MerritCR/merrit,keerath/gerrit_newssh,GerritCodeReview/gerrit,basilgor/gerrit,Overruler/gerrit,gracefullife/gerrit,basilgor/gerrit,gracefullife/gerrit,qtproject/qtqa-gerrit,anminhsu/gerrit,Team-OctOS/host_gerrit,1yvT0s/gerrit,gerrit-review/gerrit,quyixia/gerrit,GerritCodeReview/gerrit-attic,midnightradio/gerrit,basilgor/gerrit,jackminicloud/test,quyixia/gerrit,thinkernel/gerrit,sudosurootdev/gerrit,GerritCodeReview/gerrit,renchaorevee/gerrit,quyixia/gerrit,gerrit-review/gerrit,evanchueng/gerrit,CandyShop/gerrit,teamblueridge/gerrit,qtproject/qtqa-gerrit,Seinlin/gerrit,austinchic/Gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit-attic,jackminicloud/test,TonyChai24/test,gcoders/gerrit,qtproject/qtqa-gerrit,ckamm/gerrit,joshuawilson/merrit,CandyShop/gerrit,supriyantomaftuh/gerrit,netroby/gerrit,quyixia/gerrit,gerrit-review/gerrit,jackminicloud/test,Seinlin/gerrit,supriyantomaftuh/gerrit,GerritCodeReview/gerrit,gerrit-review/gerrit,joshuawilson/merrit,Seinlin/gerrit,netroby/gerrit,TonyChai24/test,gracefullife/gerrit,pkdevbox/gerrit,rtyley/mini-git-server,jeblair/gerrit,CandyShop/gerrit,Distrotech/gerrit,joshuawilson/merrit,supriyantomaftuh/gerrit,GerritCodeReview/gerrit,thinkernel/gerrit,pkdevbox/gerrit,m1kah/gerrit-contributions,thesamet/gerrit,sudosurootdev/gerrit,renchaorevee/gerrit,MerritCR/merrit,thinkernel/gerrit,thinkernel/gerrit | // Copyright 2008 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.client.account;
import com.google.gerrit.client.reviewdb.Account;
import com.google.gerrit.client.reviewdb.AccountProjectWatch;
import com.google.gerrit.client.rpc.SignInRequired;
import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.gwtjsonrpc.client.RemoteJsonService;
import com.google.gwtjsonrpc.client.VoidResult;
import java.util.List;
import java.util.Set;
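/** JSON-RPC service for reading and updating the signed-in user's own account data. */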
public interface AccountService extends RemoteJsonService {
@SignInRequired
void myAccount(AsyncCallback<Account> callback);
@SignInRequired
void changeDefaultContext(short newSetting, AsyncCallback<VoidResult> callback);
@SignInRequired
void myProjectWatch(AsyncCallback<List<AccountProjectWatchInfo>> callback);
@SignInRequired
void addProjectWatch(String projectName,
AsyncCallback<AccountProjectWatchInfo> callback);
@SignInRequired
void deleteProjectWatches(Set<AccountProjectWatch.Key> keys,
AsyncCallback<VoidResult> callback);
@SignInRequired
void myAgreements(AsyncCallback<AgreementInfo> callback);
}
| appjar/src/main/java/com/google/gerrit/client/account/AccountService.java | // Copyright 2008 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.client.account;
import com.google.gerrit.client.reviewdb.Account;
import com.google.gerrit.client.reviewdb.AccountProjectWatch;
import com.google.gerrit.client.rpc.SignInRequired;
import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.gwtjsonrpc.client.AllowCrossSiteRequest;
import com.google.gwtjsonrpc.client.RemoteJsonService;
import com.google.gwtjsonrpc.client.VoidResult;
import java.util.List;
import java.util.Set;
public interface AccountService extends RemoteJsonService {
@SignInRequired
@AllowCrossSiteRequest
void myAccount(AsyncCallback<Account> callback);
@SignInRequired
void changeDefaultContext(short newSetting, AsyncCallback<VoidResult> callback);
@SignInRequired
@AllowCrossSiteRequest
void myProjectWatch(AsyncCallback<List<AccountProjectWatchInfo>> callback);
@SignInRequired
void addProjectWatch(String projectName,
AsyncCallback<AccountProjectWatchInfo> callback);
@SignInRequired
void deleteProjectWatches(Set<AccountProjectWatch.Key> keys,
AsyncCallback<VoidResult> callback);
@SignInRequired
@AllowCrossSiteRequest
void myAgreements(AsyncCallback<AgreementInfo> callback);
}
| Remove AllowCrossSiteRequest annotation from possibly sensitive methods
These methods provide direct access to user account data. They
should only be available over HTTP POST, and only after the XSRF
token trick has been satisfied. This reduces the exposure risk
for evil 3rd party pages to try and steal this data via a novel
XSRF attack we haven't heard of yet.
Signed-off-by: Shawn O. Pearce <[email protected]>
| appjar/src/main/java/com/google/gerrit/client/account/AccountService.java | Remove AllowCrossSiteRequest annotation from possibly sensitive methods | <ide><path>ppjar/src/main/java/com/google/gerrit/client/account/AccountService.java
<ide> import com.google.gerrit.client.reviewdb.AccountProjectWatch;
<ide> import com.google.gerrit.client.rpc.SignInRequired;
<ide> import com.google.gwt.user.client.rpc.AsyncCallback;
<del>import com.google.gwtjsonrpc.client.AllowCrossSiteRequest;
<ide> import com.google.gwtjsonrpc.client.RemoteJsonService;
<ide> import com.google.gwtjsonrpc.client.VoidResult;
<ide>
<ide>
<ide> public interface AccountService extends RemoteJsonService {
<ide> @SignInRequired
<del> @AllowCrossSiteRequest
<ide> void myAccount(AsyncCallback<Account> callback);
<ide>
<ide> @SignInRequired
<ide> void changeDefaultContext(short newSetting, AsyncCallback<VoidResult> callback);
<ide>
<ide> @SignInRequired
<del> @AllowCrossSiteRequest
<ide> void myProjectWatch(AsyncCallback<List<AccountProjectWatchInfo>> callback);
<ide>
<ide> @SignInRequired
<ide> AsyncCallback<VoidResult> callback);
<ide>
<ide> @SignInRequired
<del> @AllowCrossSiteRequest
<ide> void myAgreements(AsyncCallback<AgreementInfo> callback);
<ide> } |
|
JavaScript | mit | f1e812868655997cfa4e9ee2da29d972c55bf36d | 0 | kuznetsovlv/bemclassname,kuznetsovlv/bemclassname | "use strict";
const webpack = require('webpack');
const path = require('path');
const NODE_ENV = process.env.NODE_ENV || "development";
const DEV = NODE_ENV === "development";
const TEST = NODE_ENV === "test";
const WEB = NODE_ENV === "web";
const PRODUCT = NODE_ENV === "product";
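// Expose the build mode flags to the bundled code through DefinePlugin.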
const plugins = [
new webpack.NoErrorsPlugin(),
new webpack.DefinePlugin({
'process.env': {
NODE_ENV: JSON.stringify(NODE_ENV),
DEV: JSON.stringify(DEV),
TEST: JSON.stringify(TEST),
WEB: JSON.stringify(WEB),
PRODUCT: JSON.stringify(PRODUCT)
}
})
];
const webOutput = {filename: 'BEMClassNames.js', path: path.resolve(__dirname, 'web'), library: 'BEMClassNames', libraryTarget: 'var'};
const defaultOutput = {filename: 'index.js', path: path.resolve(__dirname, DEV || TEST ? 'test' : 'dist'), library: 'BEMClassNames', libraryTarget: 'umd'};
const productPlugins = [
new webpack.optimize.UglifyJsPlugin({
compress: { warnings: false, drop_console: true, unsafe: true }
})
];
function getExtentions () {
const extentions = ['', '.js'];
if (DEV || TEST)
extentions.push('_dev.js');
return extentions;
}
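// Entry point, output target, watch mode and plugins all depend on NODE_ENV (development, test, web or product).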
module.exports = {
context: path.resolve(__dirname, 'src'),
entry: DEV || TEST ? 'index_dev' : 'index',
noInfo: true,
target: 'node',
output: WEB ? webOutput : defaultOutput,
watch: DEV,
watchOptions: {
aggregateTimeout: 100
},
devtool: DEV ? "cheap-source-map" : null,
plugins: !DEV ? plugins.concat(productPlugins) : plugins,
resolve: {
modulesDirectories: ['node_modules', 'src'],
extensions: getExtentions()
},
resolveLoader: {
modulesDirectories: ['node_modules'],
moduleTemplates: ['*-loader', '*'],
extensions: ['', '.js']
},
module: {
loaders: [{
exclude: /node_modules/,
test: /\.js$/,
loader: 'babel'
}]
}
}
| webpack.config.js | "use strict";
const webpack = require('webpack');
const path = require('path');
const NODE_ENV = process.env.NODE_ENV || "development";
const DEV = NODE_ENV === "development";
const TEST = NODE_ENV === "test";
const WEB = NODE_ENV === "web";
const PRODUCT = NODE_ENV === "product";
const plugins = [
new webpack.NoErrorsPlugin(),
new webpack.DefinePlugin({
'process.env': {
NODE_ENV: JSON.stringify(NODE_ENV),
DEV: JSON.stringify(DEV),
TEST: JSON.stringify(TEST),
WEB: JSON.stringify(WEB),
PRODUCT: JSON.stringify(PRODUCT)
}
})
];
const webOutput = {filename: 'BEMClassNames.js', path: path.resolve(__dirname, 'web'), library: 'BEMClassNames', libraryTarget: 'var'};
const defaultOutput = {filename: 'index.js', path: path.resolve(__dirname, DEV || TEST ? 'test' : 'dist')};
const productPlugins = [
new webpack.optimize.UglifyJsPlugin({
compress: { warnings: false, drop_console: true, unsafe: true }
})
];
function getExtentions () {
const extentions = ['', '.js'];
if (DEV || TEST)
extentions.push('_dev.js');
return extentions;
}
module.exports = {
context: path.resolve(__dirname, 'src'),
entry: DEV || TEST ? 'index_dev' : 'index',
noInfo: true,
target: 'node',
output: WEB ? webOutput : defaultOutput,
watch: DEV,
watchOptions: {
aggregateTimeout: 100
},
devtool: DEV ? "cheap-source-map" : null,
plugins: !DEV ? plugins.concat(productPlugins) : plugins,
resolve: {
modulesDirectories: ['node_modules', 'src'],
extensions: getExtentions()
},
resolveLoader: {
modulesDirectories: ['node_modules'],
moduleTemplates: ['*-loader', '*'],
extensions: ['', '.js']
},
module: {
loaders: [{
exclude: /node_modules/,
test: /\.js$/,
loader: 'babel'
}]
}
}
| Fixed build
| webpack.config.js | Fixed build | <ide><path>ebpack.config.js
<ide> ];
<ide>
<ide> const webOutput = {filename: 'BEMClassNames.js', path: path.resolve(__dirname, 'web'), library: 'BEMClassNames', libraryTarget: 'var'};
<del>const defaultOutput = {filename: 'index.js', path: path.resolve(__dirname, DEV || TEST ? 'test' : 'dist')};
<add>const defaultOutput = {filename: 'index.js', path: path.resolve(__dirname, DEV || TEST ? 'test' : 'dist'), library: 'BEMClassNames', libraryTarget: 'umd'};
<ide>
<ide> const productPlugins = [
<ide> new webpack.optimize.UglifyJsPlugin({ |
|
Java | bsd-3-clause | error: pathspec 'projects/rules/src/main/java/gov/nih/nci/cabig/caaers/rules/runtime/BusinessRulesExecutionService.java' did not match any file(s) known to git
| 2b0c410a7fcd638d57fab1f6796db1aebc6129aa | 1 | CBIIT/caaers,NCIP/caaers,NCIP/caaers,NCIP/caaers,CBIIT/caaers,CBIIT/caaers,CBIIT/caaers,NCIP/caaers,CBIIT/caaers | package gov.nih.nci.cabig.caaers.rules.runtime;
import java.util.List;
public interface BusinessRulesExecutionService {
public List<Object> fireRules(String bindingURI, List<Object> objects);
}
| projects/rules/src/main/java/gov/nih/nci/cabig/caaers/rules/runtime/BusinessRulesExecutionService.java |
SVN-Revision: 1657
| projects/rules/src/main/java/gov/nih/nci/cabig/caaers/rules/runtime/BusinessRulesExecutionService.java | <ide><path>rojects/rules/src/main/java/gov/nih/nci/cabig/caaers/rules/runtime/BusinessRulesExecutionService.java
<add>package gov.nih.nci.cabig.caaers.rules.runtime;
<add>
<add>import java.util.List;
<add>
<add>public interface BusinessRulesExecutionService {
<add>
<add> public List<Object> fireRules(String bindingURI, List<Object> objects);
<add>} |
||
Java | apache-2.0 | 4c4d6fa328b381b490989cdede2d272603a08724 | 0 | vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa,vespa-engine/vespa | // Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.vespa.hosted.provision.maintenance.retire;
import com.google.common.net.InetAddresses;
import com.yahoo.config.provision.Environment;
import com.yahoo.config.provision.Flavor;
import com.yahoo.config.provision.RegionName;
import com.yahoo.config.provision.SystemName;
import com.yahoo.config.provision.Zone;
import com.yahoo.vespa.hosted.provision.Node;
import java.net.Inet4Address;
import java.util.Optional;
/**
* @author freva
*/
public class RetireIPv4OnlyNodes implements RetirementPolicy {
private final Zone zone;
public RetireIPv4OnlyNodes(Zone zone) {
this.zone = zone;
}
@Override
public boolean isActive() {
if(zone.system() == SystemName.cd) {
return zone.environment() == Environment.dev || zone.environment() == Environment.prod;
}
if (zone.system() == SystemName.main) {
if (zone.region().equals(RegionName.from("us-east-3"))) {
return zone.environment() == Environment.perf || zone.environment() == Environment.prod;
} else if (zone.region().equals(RegionName.from("us-west-1"))) {
return zone.environment() == Environment.prod;
} else if (zone.region().equals(RegionName.from("us-central-1"))) {
return zone.environment() == Environment.prod;
}
}
return false;
}
@Override
public Optional<String> shouldRetire(Node node) {
if (node.flavor().getType() == Flavor.Type.VIRTUAL_MACHINE) return Optional.empty();
boolean shouldRetire = node.ipAddresses().stream()
.map(InetAddresses::forString)
.allMatch(address -> address instanceof Inet4Address);
return shouldRetire ? Optional.of("Node is IPv4-only") : Optional.empty();
}
}
| node-repository/src/main/java/com/yahoo/vespa/hosted/provision/maintenance/retire/RetireIPv4OnlyNodes.java | // Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.vespa.hosted.provision.maintenance.retire;
import com.google.common.net.InetAddresses;
import com.yahoo.config.provision.Environment;
import com.yahoo.config.provision.Flavor;
import com.yahoo.config.provision.RegionName;
import com.yahoo.config.provision.SystemName;
import com.yahoo.config.provision.Zone;
import com.yahoo.vespa.hosted.provision.Node;
import java.net.Inet4Address;
import java.util.Optional;
/**
* @author freva
*/
public class RetireIPv4OnlyNodes implements RetirementPolicy {
private final Zone zone;
public RetireIPv4OnlyNodes(Zone zone) {
this.zone = zone;
}
@Override
public boolean isActive() {
if(zone.system() == SystemName.cd) {
return zone.environment() == Environment.dev || zone.environment() == Environment.prod;
}
if (zone.system() == SystemName.main) {
if (zone.region().equals(RegionName.from("us-east-3"))) {
return zone.environment() == Environment.perf || zone.environment() == Environment.prod;
} else if (zone.region().equals(RegionName.from("us-west-1"))) {
return zone.environment() == Environment.prod;
}
}
return false;
}
@Override
public Optional<String> shouldRetire(Node node) {
if (node.flavor().getType() == Flavor.Type.VIRTUAL_MACHINE) return Optional.empty();
boolean shouldRetire = node.ipAddresses().stream()
.map(InetAddresses::forString)
.allMatch(address -> address instanceof Inet4Address);
return shouldRetire ? Optional.of("Node is IPv4-only") : Optional.empty();
}
}
| retire IPv4 hosts in us-central-1
| node-repository/src/main/java/com/yahoo/vespa/hosted/provision/maintenance/retire/RetireIPv4OnlyNodes.java | retire IPv4 hosts in us-central-1 | <ide><path>ode-repository/src/main/java/com/yahoo/vespa/hosted/provision/maintenance/retire/RetireIPv4OnlyNodes.java
<ide> return zone.environment() == Environment.perf || zone.environment() == Environment.prod;
<ide> } else if (zone.region().equals(RegionName.from("us-west-1"))) {
<ide> return zone.environment() == Environment.prod;
<add> } else if (zone.region().equals(RegionName.from("us-central-1"))) {
<add> return zone.environment() == Environment.prod;
<ide> }
<ide> }
<ide> |
|
Java | apache-2.0 | 65c02ccfe6f30494df6d1f1393245bf3893def9f | 0 | rabix/bunny,rabix/bunny,rabix/bunny,rabix/bunny,rabix/bunny | package org.rabix.engine.store.memory.impl;
import com.google.inject.Inject;
import org.rabix.bindings.model.dag.DAGLinkPort.LinkPortType;
import org.rabix.engine.store.model.VariableRecord;
import org.rabix.engine.store.repository.VariableRecordRepository;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.LinkedBlockingQueue;
public class InMemoryVariableRecordRepository extends VariableRecordRepository {
private ConcurrentMap<UUID, Collection<VariableRecord>> variableRecordsPerContext;
private ConcurrentMap<UUID, ConcurrentMap<String, Collection<VariableRecord>>> variableRecordsPerContextAndId;
@Inject
public InMemoryVariableRecordRepository() {
variableRecordsPerContext = new ConcurrentHashMap<>();
variableRecordsPerContextAndId = new ConcurrentHashMap<>();
}
public int insert(VariableRecord variableRecord) {
getVariableRecords(variableRecord.getRootId()).add(variableRecord);
getVariableRecordsWithId(variableRecord.getRootId(), variableRecord.getJobId()).add(variableRecord);
return 1;
}
public void delete(UUID rootId) {
variableRecordsPerContext.remove(rootId);
}
public int update(VariableRecord variableRecord) {
for (VariableRecord vr : getVariableRecordsWithId(variableRecord.getRootId(), variableRecord.getJobId())) {
if (vr.getPortId().equals(variableRecord.getPortId()) && vr.getType().equals(variableRecord.getType())) {
vr.setValue(variableRecord.getValue());
return 1;
}
}
return 0;
}
public List<VariableRecord> getByType(String jobId, LinkPortType type, UUID contextId) {
List<VariableRecord> result = new ArrayList<>();
for (VariableRecord vr : getVariableRecordsWithId(contextId, jobId)) {
if (vr.getType().equals(type)) {
result.add(vr);
}
}
return result;
}
public VariableRecord get(String jobId, String portId, LinkPortType type, UUID contextId) {
for (VariableRecord vr : getVariableRecordsWithId(contextId, jobId)) {
if (vr.getPortId().equals(portId) && vr.getType().equals(type)) {
return vr;
}
}
return null;
}
public List<VariableRecord> getByPort(String jobId, String portId, UUID contextId) {
List<VariableRecord> result = new ArrayList<>();
for (VariableRecord vr : getVariableRecordsWithId(contextId, jobId)) {
if (vr.getPortId().equals(portId)) {
result.add(vr);
}
}
return result;
}
public List<VariableRecord> findByJobId(String jobId, LinkPortType type, UUID contextId) {
List<VariableRecord> result = new ArrayList<>();
for (VariableRecord vr : getVariableRecordsWithId(contextId, jobId)) {
if (vr.getType().equals(type)) {
result.add(vr);
}
}
return result;
}
public List<VariableRecord> find(UUID contextId) {
return new ArrayList<>(getVariableRecords(contextId));
}
public Collection<VariableRecord> getVariableRecords(UUID contextId) {
return variableRecordsPerContext.computeIfAbsent(contextId, k -> new ArrayList<>());
}
public Collection<VariableRecord> getVariableRecordsWithId(UUID contextId, String jobId) {
ConcurrentMap<String, Collection<VariableRecord>> map = variableRecordsPerContextAndId.computeIfAbsent(contextId, k -> new ConcurrentHashMap<>());
return map.computeIfAbsent(jobId, k-> new ArrayList<>());
}
@Override
public void insertBatch(Iterator<VariableRecord> records) {
while(records.hasNext()) {
insert(records.next());
}
}
@Override
public void updateBatch(Iterator<VariableRecord> records) {
while(records.hasNext()) {
update(records.next());
}
}
@Override
public void delete(String id, UUID rootId) {
getVariableRecords(rootId).removeIf(variableRecord -> variableRecord.getJobId().equals(id));
getVariableRecordsWithId(rootId, id).clear();
}
@Override
public void deleteByRootId(UUID rootId) {
variableRecordsPerContext.remove(rootId);
}
}
| rabix-engine-store/src/main/java/org/rabix/engine/store/memory/impl/InMemoryVariableRecordRepository.java | package org.rabix.engine.store.memory.impl;
import com.google.inject.Inject;
import org.rabix.bindings.model.dag.DAGLinkPort.LinkPortType;
import org.rabix.engine.store.model.VariableRecord;
import org.rabix.engine.store.repository.VariableRecordRepository;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.LinkedBlockingQueue;
public class InMemoryVariableRecordRepository extends VariableRecordRepository {
private ConcurrentMap<UUID, Collection<VariableRecord>> variableRecordsPerContext;
private ConcurrentMap<UUID, ConcurrentMap<String, Collection<VariableRecord>>> variableRecordsPerContextAndId;
@Inject
public InMemoryVariableRecordRepository() {
variableRecordsPerContext = new ConcurrentHashMap<>();
variableRecordsPerContextAndId = new ConcurrentHashMap<>();
}
public int insert(VariableRecord variableRecord) {
getVariableRecords(variableRecord.getRootId()).add(variableRecord);
getVariableRecordsWithId(variableRecord.getRootId(), variableRecord.getJobId()).add(variableRecord);
return 1;
}
public void delete(UUID rootId) {
variableRecordsPerContext.remove(rootId);
}
public int update(VariableRecord variableRecord) {
for (VariableRecord vr : getVariableRecordsWithId(variableRecord.getRootId(), variableRecord.getJobId())) {
if (vr.getPortId().equals(variableRecord.getPortId()) && vr.getType().equals(variableRecord.getType())) {
vr.setValue(variableRecord.getValue());
return 1;
}
}
return 0;
}
public List<VariableRecord> getByType(String jobId, LinkPortType type, UUID contextId) {
List<VariableRecord> result = new ArrayList<>();
for (VariableRecord vr : getVariableRecordsWithId(contextId, jobId)) {
if (vr.getType().equals(type)) {
result.add(vr);
}
}
return result;
}
public VariableRecord get(String jobId, String portId, LinkPortType type, UUID contextId) {
for (VariableRecord vr : getVariableRecordsWithId(contextId, jobId)) {
if (vr.getPortId().equals(portId) && vr.getType().equals(type)) {
return vr;
}
}
return null;
}
public List<VariableRecord> getByPort(String jobId, String portId, UUID contextId) {
List<VariableRecord> result = new ArrayList<>();
for (VariableRecord vr : getVariableRecordsWithId(contextId, jobId)) {
if (vr.getPortId().equals(portId)) {
result.add(vr);
}
}
return result;
}
public List<VariableRecord> findByJobId(String jobId, LinkPortType type, UUID contextId) {
List<VariableRecord> result = new ArrayList<>();
for (VariableRecord vr : getVariableRecordsWithId(contextId, jobId)) {
if (vr.getType().equals(type)) {
result.add(vr);
}
}
return result;
}
public List<VariableRecord> find(UUID contextId) {
return new ArrayList<>(getVariableRecords(contextId));
}
public Collection<VariableRecord> getVariableRecords(UUID contextId) {
return variableRecordsPerContext.computeIfAbsent(contextId, k -> new ArrayList<>());
}
public Collection<VariableRecord> getVariableRecordsWithId(UUID contextId, String jobId) {
ConcurrentMap<String, Collection<VariableRecord>> map = variableRecordsPerContextAndId.computeIfAbsent(contextId, k -> new ConcurrentHashMap<>());
return map.computeIfAbsent(jobId, k-> new ArrayList<>());
}
@Override
public void insertBatch(Iterator<VariableRecord> records) {
while(records.hasNext()) {
insert(records.next());
}
}
@Override
public void updateBatch(Iterator<VariableRecord> records) {
while(records.hasNext()) {
update(records.next());
}
}
@Override
public void delete(String id, UUID rootId) {
getVariableRecords(rootId).removeIf(variableRecord -> variableRecord.getJobId().equals(id));
}
@Override
public void deleteByRootId(UUID rootId) {
variableRecordsPerContext.remove(rootId);
}
}
| Id cleanup
| rabix-engine-store/src/main/java/org/rabix/engine/store/memory/impl/InMemoryVariableRecordRepository.java | Id cleanup | <ide><path>abix-engine-store/src/main/java/org/rabix/engine/store/memory/impl/InMemoryVariableRecordRepository.java
<ide> @Override
<ide> public void delete(String id, UUID rootId) {
<ide> getVariableRecords(rootId).removeIf(variableRecord -> variableRecord.getJobId().equals(id));
<add> getVariableRecordsWithId(rootId, id).clear();
<ide> }
<ide>
<ide> @Override |
|
JavaScript | mit | b7ed4390ee407e4eddb8f51e5cb814f534e98824 | 0 | jeffh/YACS,JGrippo/YACS,jeffh/YACS,jeffh/YACS,JGrippo/YACS,JGrippo/YACS,jeffh/YACS,JGrippo/YACS | // require: objects.js
var Scheduler = {};
Scheduler.selection = new Selection();
///////////////////////////////////////////////////
// Data fetching
function getSavedSelection(){
var data = $('meta[name=selection-raw]').attr('content');
var obj = null;
if($.trim(data) !== '')
obj = $.parseJSON(data);
return obj;
}
// hidden feature: clear the user's selection
$(function(){
var params = location.search;
if(params.contains('?clear') || params.contains('&clear')){
if (confirm('Are you sure you want to clear your selection?')){
Scheduler.selection.clear();
location.href = '.';
}
}
});
///////////////////////////////////////////////////
// Hooks
// realtime search
$(function(){
var searchElement = $('#searchform');
if(searchElement.length){
var defaultHtml = $('#replacable-with-search').html();
searchElement.submit(function(){ return false; });
var SearchForm = new RealtimeForm(searchElement, {
cache: true,
updateElement: '#replacable-with-search',
additionalGET: {partial: 1},
complete: function(){
createSummaries();
},
triggerDelay: 300,
activityResponder: new ActivityResponder({
show: function(){
$('#search-spinner').show();
},
hide: function(){
$('#search-spinner').hide();
}
}),
suppressFormSubmit: true,
customHandler: function(form, fuse){
query = form.find('#q').val();
if($.trim(query) === ''){
$('#replacable-with-search').html(defaultHtml);
Scheduler.selection.refresh();
return true;
}
fuse();
return false;
},
success: function(value){
$('#replacable-with-search').html(value);
Scheduler.selection.refresh();
}
});
}
});
// Selected Course Feature
$(function(){
// if we're pointed to a schedule... disable all saving features
var isReadOnly = $('#courses').attr('data-readonly');
// async saves makes the click feel faster
var saveFuse = DelayedInvocation(function(){ Scheduler.selection.save(); });
$('#courses .course > input[type=checkbox], #courses .course .section > input[type=checkbox]').live('change', function(){
(this.checked ? Scheduler.selection.add(this) : Scheduler.selection.remove(this));
saveFuse();
});
// automatically refresh after any changes
var refresh = function(){
Scheduler.selection.refresh();
};
$(Scheduler.selection).bind('added', refresh).bind('removed', refresh);
refresh();
// load alternative schedule
var schedule = getSavedSelection();
if (schedule){
// prevents async-bound events from doing anything
Scheduler.selection.destroy();
var selection = new Selection({
isReadOnly: isReadOnly,
store: new MemoryStore(),
autoload: false
}).set(schedule);
if (_.isEqual(Scheduler.selection.getRaw(), selection.getRaw())){
$('#courses input[type=checkbox]').removeAttr('disabled');
isReadOnly = false;
selection.options.isReadOnly = false;
log(['equal!']);
// we're equal -- don't say anything
} else {
log(['not equal!', Scheduler.selection.getRaw(), selection.getRaw()], this);
$('#notifications').fadeIn(1000);
Scheduler.selection = selection;
$('a[data-action=adopt-selection]').bind('click', function(){
Scheduler.selection = new Selection().set(schedule);
Scheduler.selection.save();
// it's easier to just reload the page (letting the link follow through)
var spinner = $($('img.spinner').get(0)).clone().css({display: 'inline'});
var notifications = $('#notifications');
notifications.fadeOut(100, function(){
notifications.html(spinner).width($('.nav').width()).fadeIn(100);
});
$(this).unbind();
});
}
}
// must be on selected courses page
if(!$('#selected_courses').length){
return;
}
Scheduler.courseListView = new CourseListView({
el: '#selected_courses',
selected: Scheduler.selection,
isReadOnly: isReadOnly
});
});
Scheduler.getURL = function(){
var schedulesURL = $('#schedules').attr('data-source');
if(!schedulesURL) return;
if (schedulesURL.indexOf('&id=') < 0){
Scheduler.selection.getCRNs().each(function(crn){
schedulesURL += '&id=' + crn;
});
}
return schedulesURL;
};
// Bootloader for schedules
$(function(){
if(!$('#schedules').length) return;
if (!_.isEqual(Scheduler.selection.getRaw(), getSavedSelection()))
$('#notifications').fadeIn(1000);
// parse the uri
var uri = _.compact(location.href.split('/'));
var index = 0, scheduleID = null;
if (uri[uri.length - 1] !== 'schedules'){
var index = uri[uri.length - 1] || null;
var scheduleID = uri[uri.length - 2] || null;
}
Scheduler.view = new ScheduleRootView({
id: scheduleID,
index: index ? index - 1 : index,
baseURL: $('meta[name=schedules-url]').attr('content'),
section_ids: Scheduler.selection.getCRNs()
}).render();
// set arrow keys to cycle between
$(window).bind('keydown', function(evt){
switch(evt.keyCode){
case 39: // right arrow
Scheduler.view.nextSchedule();
break;
case 37: // left arrow
Scheduler.view.prevSchedule();
break;
}
});
});
| yacs/static/global/js/v2/application.js | // require: objects.js
var Scheduler = {};
Scheduler.selection = new Selection();
///////////////////////////////////////////////////
// Data fetching
function getSavedSelection(){
var data = $('meta[name=selection-raw]').attr('content');
var obj = null;
if($.trim(data) !== '')
obj = $.parseJSON(data);
return obj;
}
// hidden feature: clear the user's selection
$(function(){
var params = location.search;
if(params.contains('?clear') || params.contains('&clear')){
if (confirm('Are you sure you want to clear your selection?')){
Scheduler.selection.clear();
location.href = '.';
}
}
});
///////////////////////////////////////////////////
// Hooks
// realtime search
$(function(){
var searchElement = $('#searchform');
if(searchElement.length){
var defaultHtml = $('#replacable-with-search').html();
searchElement.submit(function(){ return false; });
var SearchForm = new RealtimeForm(searchElement, {
cache: true,
updateElement: '#replacable-with-search',
additionalGET: {partial: 1},
complete: function(){
createSummaries();
},
triggerDelay: 300,
activityResponder: new ActivityResponder({
show: function(){
$('#search-spinner').show();
},
hide: function(){
$('#search-spinner').hide();
}
}),
suppressFormSubmit: true,
customHandler: function(form, fuse){
query = form.find('#q').val();
if($.trim(query) === ''){
$('#replacable-with-search').html(defaultHtml);
Scheduler.selection.refresh();
return true;
}
fuse();
return false;
},
success: function(value){
$('#replacable-with-search').html(value);
Scheduler.selection.refresh();
}
});
}
});
// Selected Course Feature
$(function(){
// if we're pointed to a schedule... disable all saving features
var isReadOnly = $('#courses').attr('data-readonly');
// async saves makes the click feel faster
var saveFuse = DelayedInvocation(function(){ Scheduler.selection.save(); });
$('#courses .course > input[type=checkbox], #courses .course .section > input[type=checkbox]').live('change', function(){
(this.checked ? Scheduler.selection.add(this) : Scheduler.selection.remove(this));
saveFuse();
});
// automatically refresh after any changes
var refresh = function(){
Scheduler.selection.refresh();
};
$(Scheduler.selection).bind('added', refresh).bind('removed', refresh);
refresh();
// load alternative schedule
var schedule = getSavedSelection();
if (schedule){
// prevents async-bound events from doing anything
Scheduler.selection.destroy();
var selection = new Selection({
isReadOnly: isReadOnly,
store: new MemoryStore(),
autoload: false
}).set(schedule);
if (_.isEqual(Scheduler.selection.getRaw(), selection.getRaw())){
$('#courses input[type=checkbox]').removeAttr('disabled');
isReadOnly = false;
selection.options.isReadOnly = false;
log(['equal!']);
// we're equal -- don't say anything
} else {
log(['not equal!', Scheduler.selection.getRaw(), selection.getRaw()], this);
$('#notifications').fadeIn(1000);
Scheduler.selection = selection;
$('a[data-action=adopt-selection]').bind('click', function(){
Scheduler.selection = new Selection().set(schedule);
Scheduler.selection.save();
// it's easier to just reload the page (letting the link follow through)
var spinner = $($('img.spinner').get(0)).clone().css({display: 'inline'});
var notifications = $('#notifications');
notifications.fadeOut(100, function(){
notifications.html(spinner).width($('.nav').width()).fadeIn(100);
});
$(this).unbind();
});
}
}
// must be on selected courses page
if(!$('#selected_courses').length){
return;
}
Scheduler.courseListView = new CourseListView({
el: '#selected_courses',
selected: Scheduler.selection,
isReadOnly: isReadOnly
});
});
Scheduler.getURL = function(){
var schedulesURL = $('#schedules').attr('data-source');
if(!schedulesURL) return;
if (schedulesURL.indexOf('&id=') < 0){
Scheduler.selection.getCRNs().each(function(crn){
schedulesURL += '&id=' + crn;
});
}
return schedulesURL;
};
// Bootloader for schedules
$(function(){
if(!$('#schedules').length) return;
if (!_.isEqual(Scheduler.selection.getRaw(), getSavedSelection()))
$('#notifications').fadeIn(1000);
// parse the uri
var uri = _.compact(location.href.split('/'));
var index = 0, scheduleID = null;
if (uri[uri.length - 1] !== 'schedules'){
var index = uri[length - 1] || null;
var scheduleID = uri[uri.length - 2] || null;
}
Scheduler.view = new ScheduleRootView({
id: scheduleID,
index: index ? index - 1 : index,
baseURL: $('meta[name=schedules-url]').attr('content'),
section_ids: Scheduler.selection.getCRNs()
}).render();
// set arrow keys to cycle between
$(window).bind('keydown', function(evt){
switch(evt.keyCode){
case 39: // right arrow
Scheduler.view.nextSchedule();
break;
case 37: // left arrow
Scheduler.view.prevSchedule();
break;
}
});
});
| fixed perma link bug
| yacs/static/global/js/v2/application.js | fixed perma link bug | <ide><path>acs/static/global/js/v2/application.js
<ide> var uri = _.compact(location.href.split('/'));
<ide> var index = 0, scheduleID = null;
<ide> if (uri[uri.length - 1] !== 'schedules'){
<del> var index = uri[length - 1] || null;
<add> var index = uri[uri.length - 1] || null;
<ide> var scheduleID = uri[uri.length - 2] || null;
<ide> }
<ide> |
|
Java | mit | 69f5ea17dd5b07521c1ed6f3fadc248f83e4ad08 | 0 | chipster/chipster,chipster/chipster,chipster/chipster,chipster/chipster,chipster/chipster,chipster/chipster,chipster/chipster,chipster/chipster,chipster/chipster,chipster/chipster | package fi.csc.microarray.databeans;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.jms.JMSException;
import javax.swing.Icon;
import org.apache.log4j.Logger;
import org.mortbay.util.IO;
import fi.csc.microarray.client.ClientApplication;
import fi.csc.microarray.client.Session;
import fi.csc.microarray.client.operation.OperationRecord;
import fi.csc.microarray.client.session.SessionLoader;
import fi.csc.microarray.client.session.SessionSaver;
import fi.csc.microarray.databeans.DataBean.ContentLocation;
import fi.csc.microarray.databeans.DataBean.DataNotAvailableHandling;
import fi.csc.microarray.databeans.DataBean.Link;
import fi.csc.microarray.databeans.features.Feature;
import fi.csc.microarray.databeans.features.FeatureProvider;
import fi.csc.microarray.databeans.features.Modifier;
import fi.csc.microarray.databeans.handlers.ContentHandler;
import fi.csc.microarray.databeans.handlers.LocalFileContentHandler;
import fi.csc.microarray.databeans.handlers.RemoteContentHandler;
import fi.csc.microarray.databeans.handlers.ZipContentHandler;
import fi.csc.microarray.exception.MicroarrayException;
import fi.csc.microarray.filebroker.FileBrokerClient.FileBrokerArea;
import fi.csc.microarray.filebroker.FileBrokerException;
import fi.csc.microarray.filebroker.NotEnoughDiskSpaceException;
import fi.csc.microarray.module.Module;
import fi.csc.microarray.util.IOUtils;
import fi.csc.microarray.util.IOUtils.CopyProgressListener;
import fi.csc.microarray.util.Strings;
public class DataManager {
public static enum StorageMethod {
LOCAL_USER(true, true),
LOCAL_TEMP(true, true),
LOCAL_SESSION(true, false),
REMOTE_CACHED(false, true),
REMOTE_STORAGE(false, true);
// Groups that describe how fast different methods are to access.
// Keep these up-to-date when you add methods!
public static StorageMethod[] LOCAL_FILE_METHODS = {LOCAL_USER, LOCAL_TEMP};
public static StorageMethod[] REMOTE_FILE_METHODS = {REMOTE_CACHED, REMOTE_STORAGE};
public static StorageMethod[] OTHER_SLOW_METHODS = {LOCAL_SESSION};
private boolean isLocal;
private boolean isRandomAccess;
StorageMethod(boolean isLocal, boolean isRandomAccess) {
this.isLocal = isLocal;
this.isRandomAccess = isRandomAccess;
}
public boolean isLocal() {
return isLocal;
}
public boolean isRandomAccess() {
return isRandomAccess;
}
}
private static final String TEMP_DIR_PREFIX = "chipster";
private static final int MAX_FILENAME_LENGTH = 256;
private static final Logger logger = Logger.getLogger(DataManager.class);
/**
* Reports session validation related problems.
*/
public static class ValidationException extends Exception {
public ValidationException(String validationDetails) {
super(validationDetails);
}
}
/**
* The initial name for the root folder.
*/
public final static String ROOT_NAME = "Datasets";
private Map<String, FeatureProvider> factories = new HashMap<String, FeatureProvider>();
private Map<String, Modifier> modifiers = new HashMap<String, Modifier>();
/** MIME types for the DataBeans */
private Map<String, ContentType> contentTypes = new HashMap<String, ContentType>();
/** Mapping file extensions to content types */
private Map<String, String> extensionMap = new HashMap<String, String>();
private LinkedList<DataChangeListener> listeners = new LinkedList<DataChangeListener>();
private boolean eventsEnabled = false;
private DataFolder rootFolder;
private File repositoryRoot;
private LinkedList<Module> modules;
private ZipContentHandler zipContentHandler = new ZipContentHandler();
private LocalFileContentHandler localFileContentHandler = new LocalFileContentHandler();
private RemoteContentHandler remoteContentHandler = new RemoteContentHandler();
public DataManager() throws Exception {
rootFolder = createFolder(DataManager.ROOT_NAME);
// initialize repository
repositoryRoot = createRepository();
}
public void setRootFolder(DataFolder folder) {
this.rootFolder = folder;
}
public File getRepository() {
return repositoryRoot;
}
/**
* Returns the root folder, acting as a gateway into the actual data
* content under this manager.
*/
public DataFolder getRootFolder() {
return rootFolder;
}
public boolean isRootFolder(DataFolder folder) {
return (rootFolder == folder) && (rootFolder != null);
}
/**
* Creates a folder under this manager. Folder will be created without parent.
*
* @param name name for the new folder
*/
public DataFolder createFolder(String name) {
DataFolder folder = new DataFolder(this, name);
return folder;
}
/**
* Creates a folder under this manager.
*
* @param root the folder under which the new folder is to be created
* @param name name for the new folder
*/
public DataFolder createFolder(DataFolder root, String name) {
DataFolder folder = new DataFolder(this, name);
connectChild(folder, root); // events are dispatched from here
return folder;
}
/**
* Adds a listener listening to changes in beans and folders of this manager.
*/
public void addDataChangeListener(DataChangeListener listener) {
logger.debug("adding DataChangeListener: " + listener);
if (listener == null) {
throw new IllegalArgumentException("listener cannot be null");
}
listeners.add(listener);
}
/**
* Creates a new empty file in the repository managed by this DataManager.
* All the files in this repository should be created by this method.
*
* The actual contents of the files may be added either by the
* createDataBean(..., InputStream) methods of this manager, or
* externally and then using the createDataBean(... File) methods
* to create the DataBean.
*
* This is needed to avoid overwriting data, in the case of
* duplicate DataBean names.
*
* @author Taavi Hupponen
*
* @param beanName
* @return
* @throws IOException
*/
public synchronized File createNewRepositoryFile(String beanName) throws IOException {
String fileName = beanName.replaceAll("[^\\w\\.\\-_]", "");
if (fileName.length() < 1) {
fileName = "data";
} else if (fileName.length() > MAX_FILENAME_LENGTH) {
fileName = fileName.substring(0, MAX_FILENAME_LENGTH);
}
File file = new File(this.repositoryRoot, fileName);
// if file with the beanName already exists, add running number to the name
int indexOfDot = fileName.lastIndexOf(".");
String newFileName = "";
for (int i = 1; file.exists() && i < Integer.MAX_VALUE; i++) {
// no dot add to end
if (indexOfDot < 0 ) {
newFileName = fileName + "-" + i;
}
// add before last dot
else {
newFileName = fileName.substring(0, indexOfDot) + "-" + i + fileName.substring(indexOfDot, fileName.length());
}
file = new File(this.repositoryRoot, newFileName);
}
// create the file
if (!file.createNewFile()) {
throw new IOException("Could not create file " + fileName);
}
// return the file
file.deleteOnExit();
return file;
}
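	// A minimal usage sketch, not called anywhere in this class: repository files are always
	// allocated through createNewRepositoryFile() so that beans with duplicate names cannot
	// overwrite each other's data. The bean name used below is only an example.
	private DataBean exampleCreateRepositoryBackedBean() throws IOException, MicroarrayException {
		File contentFile = createNewRepositoryFile("example-data.tsv");
		// the caller fills contentFile (externally or via an OutputStream) before using the bean
		return createDataBean("example-data.tsv", contentFile);
	}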
private File createRepository() throws IOException {
// get temp dir
File tempRoot = getTempRoot();
if (!tempRoot.canWrite()) {
// give up
throw new IOException("Could not create repository directory.");
}
String fileName = TEMP_DIR_PREFIX;
File repository = new File(tempRoot, fileName);
// if directory with that name already exists, add running number
boolean repositoryCreated = false;
for (int i = 1; !repositoryCreated && i < 1000; i++) {
repositoryCreated = repository.mkdir();
if (!repositoryCreated) {
repository = new File(tempRoot, fileName + "-" + i);
}
}
if (!repositoryCreated) {
throw new IOException("Could not create repository directory.");
}
repository.deleteOnExit();
return repository;
}
private File getTempRoot() {
File tempDir = new File(System.getProperty("java.io.tmpdir"));
// check if temp dir is writeable
if (!tempDir.canWrite()) {
// try home dir
tempDir = new File(System.getProperty("user.home"));
if (!tempDir.canWrite()) {
// try current working dir
tempDir = new File(System.getProperty("user.dir"));
}
}
return tempDir;
}
/**
* @param enabled if property change events should be sent
* @see #addDataChangeListener(DataChangeListener)
*/
public void setEventsEnabled(boolean enabled) {
this.eventsEnabled = enabled;
}
public void dispatchEventIfVisible(DataChangeEvent event) {
if (event.getDataItem().getParent() != null) {
dispatchEvent(event);
}
}
public void dispatchEvent(DataChangeEvent event) {
if (eventsEnabled) {
// dispatch events only for connected data items
for (DataChangeListener listener : listeners) {
if (listener == null) {
logger.error("One of the DataChangeListeners listeners was null.");
} else {
logger.debug("Notifying DataChangeListener " + listener.toString());
}
try {
listener.dataChanged(event);
} catch (RuntimeException e) {
// we will not let GUI problems stop important DataBean manipulation operations
// and possibly lead to DataBean model corruption
logger.error("DataChangeEvent dispatch failed", e);
}
}
}
}
public static DataBean[] wrapSource(DataBean source) {
DataBean[] sources = null;
if (source != null) {
sources = new DataBean[1];
sources[0] = source;
} else {
sources = new DataBean[0];
}
return sources;
}
/**
* Guess the MIME content type using the filename.
*
* For now, simply use the extension to figure out the mime type.
*
* Types are plugged at ApplicationConstants.
*
*/
public ContentType guessContentType(String name) {
ContentType type = null;
if(name.lastIndexOf(".") != -1){
String extension = name.substring(name.lastIndexOf(".") + 1, name.length()).toLowerCase();
String typeName = extensionMap.get(extension);
if (typeName != null) {
type = contentTypes.get(typeName);
}
}
if (type == null) {
type = contentTypes.get("application/octet-stream");
}
return type;
}
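	// A minimal sketch of how extension-based guessing is driven by plugContentType(); the MIME
	// type and extension below are assumptions and this helper is not used by production code,
	// real types are plugged in by the client modules at startup.
	private ContentType exampleGuessByExtension() {
		plugContentType("text/tab-example", true, false, "tab separated values", null, "tsv");
		return guessContentType("results.tsv"); // resolved through the extension map
	}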
/**
* Guesses MIME content type from a filename and possibly file content.
*/
public ContentType guessContentType(File file) {
return guessContentType(file.getName());
}
/**
* @return MIME content type for the given type name
*/
public ContentType getContentType(String typeName) {
return contentTypes.get(typeName);
}
/**
* Plugs a MIME content type, so that it can be used in all beans under this manager.
*
* @param mimeType MIME name
* @param supported is this a known (supported directly) content type?
* @param description a short textual description
* @param extensions file extensions belonging to this type
*/
public void plugContentType(String mimeType, boolean supported, boolean binary, String description, Icon icon, String... extensions) {
// create the content type
contentTypes.put(mimeType, new ContentType(mimeType, supported, binary, description, icon, extensions));
// add extensions to search map
for (String extension: extensions) {
extensionMap.put(extension, mimeType);
}
}
/**
* Plugs a modifier (part of Feature API), so that it can be used in all beans under this manager.
*/
public void plugModifier(String name, Modifier modifier) {
modifiers.put(name, modifier);
}
/**
* Plugs a feature factory, so that it can be used in all beans under this manager.
*/
public void plugFeatureFactory(String name, FeatureProvider plugin) {
logger.debug("plugged " + plugin.getClass().getSimpleName() + " at " + name);
plugin.setName(name);
factories.put(name, plugin);
}
public Modifier fetchModifier(String modifierName) {
return modifiers.get(modifierName);
}
public Feature fetchFeature(String featureName, DataBean bean) {
String bestMatch = null;
for (String feature : factories.keySet()) {
if (featureName.startsWith(feature)) {
if (bestMatch == null || feature.length() > bestMatch.length()) {
// current best match
bestMatch = feature;
}
}
}
FeatureProvider factory = factories.get(bestMatch);
if (factory == null) {
throw new RuntimeException("no feature factory plugged in for \"" + featureName + "\" (total of " + factories.size() + " factories plugged)");
}
logger.debug("best match for " + featureName + " was " + (factory != null ? factory.getName() : factory));
String namePostfix = getNamePostfix(featureName, factory.getName());
return factory.createFeature(namePostfix, bean);
}
/**
* Find and return the first DataItem with the given name.
* @param name the name of the DataItem being searched for
* @return the first found DataItem with given name
*/
public DataItem findDataItem(String name) {
return findDataItem(name, getRootFolder());
}
private DataItem findDataItem(String name, DataItem root) {
DataItem matchingItem = null;
// root item matches
if (root.getName().equals(name)) {
return root;
}
// root is a folder, search children
else if (root instanceof DataFolder) {
for (DataItem child: ((DataFolder)root).getChildren()) {
matchingItem = findDataItem(name, child);
if (matchingItem != null) {
return matchingItem;
}
}
}
// no match found
return null;
}
/**
* Find and return the first DataBean with the given name.
* @param name the name of the DataBean being searched for
* @return the first found DataBean with given name
*/
public DataBean getDataBean(String name) {
for (DataBean dataBean : databeans()) {
if (dataBean.getName().equals(name)) {
return dataBean;
}
}
return null;
}
/**
* Create a local temporary file DataBean without content, without a parent folder and without sources.
* If a reference to this bean is lost it can not be accessed any more.
*/
public DataBean createLocalTempDataBean(String name) throws MicroarrayException {
try {
File contentFile = createNewRepositoryFile(name);
DataBean bean = createDataBean(name);
addUrl(bean, StorageMethod.LOCAL_TEMP, contentFile.toURI().toURL());
return bean;
} catch (IOException e) {
throw new MicroarrayException(e);
}
}
/**
* Creates new DataBean. Infers content type of the created DataBean from the name.
*
* @param name name of the DataBean
* @return new DataBean that is not connected to a DataFolder
*/
public DataBean createDataBean(String name) throws MicroarrayException {
DataBean data = new DataBean(name, guessContentType(name), this);
return data;
}
/**
* Convenience method for creating a local file DataBean. Initialises the DataBean with local file
* location. The file is used directly, the contents are not copied anywhere.
*
*/
public DataBean createDataBean(String name, File contentFile) throws MicroarrayException {
try {
DataBean bean = createDataBean(name);
addUrl(bean, StorageMethod.LOCAL_USER, contentFile.toURI().toURL());
return bean;
} catch (IOException e) {
throw new MicroarrayException(e);
}
}
/**
* Convenience method for creating a local temporary file DataBean with content.
* Content stream is read into a temp file and location of the file is stored
* to DataBean.
*/
public DataBean createDataBean(String name, InputStream content) throws MicroarrayException {
// copy the data from the input stream to the file in repository
File contentFile;
try {
contentFile = createNewRepositoryFile(name);
InputStream input = new BufferedInputStream(content);
OutputStream output = new BufferedOutputStream(new FileOutputStream(contentFile));
IO.copy(input, output);
input.close();
output.flush();
output.close();
} catch (IOException ioe) {
throw new MicroarrayException(ioe);
}
// create and return the bean
DataBean bean = createDataBean(name);
try {
addUrl(bean, StorageMethod.LOCAL_TEMP, contentFile.toURI().toURL());
} catch (MalformedURLException e) {
throw new MicroarrayException(e);
}
return bean;
}
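	// A minimal sketch (file and names are assumptions) of the three ways a DataBean gets its
	// content: pointing at an existing local file as-is, copying an InputStream into the managed
	// repository, or creating an empty local temp bean whose content is written later.
	private void exampleCreateBeans(File existingFile, InputStream content) throws MicroarrayException {
		DataBean fromFile = createDataBean(existingFile.getName(), existingFile); // LOCAL_USER, file used in place
		DataBean fromStream = createDataBean("copy-of-upload.txt", content);      // LOCAL_TEMP, stream copied to repository
		DataBean emptyBean = createLocalTempDataBean("written-later.txt");        // LOCAL_TEMP, no content yet
	}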
/**
* Load session from a file.
*
* @see #saveSession(File, ClientApplication)
*/
public void loadSession(File sessionFile, boolean isDataless) throws Exception {
SessionLoader sessionLoader = new SessionLoader(sessionFile, isDataless, this);
sessionLoader.loadSession();
}
/**
* Saves session (all data: beans, folder structure, operation metadata, links etc.) to a file.
* File is a zip file with all the data files and one metadata file.
*
* @throws ValidationException if the session file was written but its metadata failed validation
* @throws Exception
*/
public void saveSession(File sessionFile) throws Exception {
// save session file
boolean metadataValid = false;
SessionSaver sessionSaver = new SessionSaver(sessionFile, this);
metadataValid = sessionSaver.saveSession();
// check validation
if (!metadataValid) {
// save was successful but metadata validation failed, file might be usable
String validationDetails = sessionSaver.getValidationErrors();
throw new ValidationException(validationDetails);
}
}
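	// A minimal sketch of a save/load round trip (the session file location is an assumption);
	// a ValidationException means the zip was written but its metadata did not validate.
	private void exampleSaveAndReloadSession() throws Exception {
		File sessionFile = new File(getRepository(), "example-session.zip");
		saveSession(sessionFile);
		deleteAllDataItems();
		loadSession(sessionFile, false);
	}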
/**
* Saves lightweight session (folder structure, operation metadata, links etc.) to a file.
* Does not save actual data inside databeans.
*
* @throws Exception
*/
public void saveLightweightSession(File sessionFile) throws Exception {
SessionSaver sessionSaver = new SessionSaver(sessionFile, this);
sessionSaver.saveLightweightSession();
}
/**
* Returns debug print out of current session state.
*
* @return print out of session state
*/
public String printSession() {
StringBuffer buffer = new StringBuffer();
SessionSaver.dumpSession(rootFolder, buffer);
return buffer.toString();
}
public void saveStorageSession(File sessionFile) throws Exception {
SessionSaver sessionSaver = new SessionSaver(sessionFile, this);
sessionSaver.saveStorageSession();
}
/**
* Delete DataItem and its children (if any). Root folder cannot be removed.
*
* @param data item to be deleted
*/
public void delete(DataItem data) {
if (data instanceof DataFolder) {
deleteDataFolder((DataFolder)data);
} else {
deleteDataBean((DataBean)data);
}
}
/**
* Remove all DataBeans and DataFolders, except for the root folder.
*/
public void deleteAllDataItems() {
deleteDataFolder(getRootFolder());
}
private void deleteDataBean(DataBean bean) {
// remove from operation history
for (DataBean source : databeans()) {
// we must iterate over all data beans because links cannot be trusted (they might have been removed by the user)
OperationRecord operationRecord = source.getOperationRecord();
if (operationRecord != null) {
operationRecord.removeInput(bean);
}
}
// remove links
for (Link linkType : Link.values()) {
// Remove outgoing links
for (DataBean target : bean.getLinkTargets(linkType)) {
bean.removeLink(linkType, target);
}
// Remove incoming links
for (DataBean source : bean.getLinkSources(linkType)) {
source.removeLink(linkType, bean);
}
}
// remove bean
DataFolder folder = bean.getParent();
if (folder != null) {
disconnectChild(bean, folder);
}
// remove physical file
bean.delete();
}
/**
* Return all DataBeans under this manager.
*/
public List<DataBean> databeans() {
LinkedList<DataBean> databeans = new LinkedList<DataBean>();
for (DataFolder folder : folders()) {
for (DataItem child : folder.getChildren()) {
if (child instanceof DataBean) {
databeans.add((DataBean) child);
}
}
}
return databeans;
}
/**
* Return all DataFolders under this manager.
*/
public List<DataFolder> folders() {
return folders(getRootFolder());
}
public List<DataFolder> folders(DataFolder parent) {
LinkedList<DataFolder> folders = new LinkedList<DataFolder>();
folders.add(parent);
for (DataItem child : parent.getChildren()) {
if (child instanceof DataFolder) {
folders.addAll(folders((DataFolder) child));
}
}
return folders;
}
public OutputStream getContentOutputStreamAndLockDataBean(DataBean bean) throws IOException {
// only local temp beans support output, so convert to local temp bean if needed
ContentLocation tempLocalLocation = bean.getContentLocation(StorageMethod.LOCAL_TEMP);
if (tempLocalLocation == null) {
this.convertToLocalTempDataBean(bean);
tempLocalLocation = bean.getContentLocation(StorageMethod.LOCAL_TEMP);
}
// remove all other locations, as they will become obsolete when OutputStream is written to
while (bean.getContentLocations().size() > 1) {
for (ContentLocation location : bean.getContentLocations()) {
if (location != tempLocalLocation) {
bean.removeContentLocation(location);
break; // remove outside of the iterator, cannot continue
}
}
}
return tempLocalLocation.getHandler().getOutputStream(tempLocalLocation);
}
public void closeContentOutputStreamAndUnlockDataBean(DataBean bean, OutputStream out)
throws MicroarrayException, IOException {
try {
out.close();
} finally {
// this.lock.writeLock().unlock();
}
ContentChangedEvent cce = new ContentChangedEvent(bean);
this.dispatchEventIfVisible(cce);
}
public File getLocalFile(DataBean bean) throws IOException {
ContentLocation location = bean.getContentLocation(StorageMethod.LOCAL_FILE_METHODS);
// convert non local file beans to local file beans
if (location == null) {
this.convertToLocalTempDataBean(bean);
location = bean.getContentLocation(StorageMethod.LOCAL_FILE_METHODS);
}
// get the file
LocalFileContentHandler handler = (LocalFileContentHandler) location.getHandler();
return handler.getFile(location);
}
private void convertToLocalTempDataBean(DataBean bean) throws IOException {
// copy contents to new file
File newFile = this.createNewRepositoryFile(bean.getName());
BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(newFile));
BufferedInputStream in = new BufferedInputStream(bean.getContentStream(DataNotAvailableHandling.EXCEPTION_ON_NA));
try {
IOUtils.copy(in, out);
} finally {
IOUtils.closeIfPossible(in);
IOUtils.closeIfPossible(out);
}
// update url, type and handler in the bean
URL newURL = newFile.toURI().toURL();
addUrl(bean, StorageMethod.LOCAL_TEMP, newURL);
}
private void deleteDataFolder(DataFolder folder) {
// remove children
Iterable<DataItem> children = folder.getChildren();
// make a copy of the children list to avoid concurrent modification
List<DataItem> childrenToBeRemoved = new LinkedList<DataItem>();
for (DataItem item : children) {
childrenToBeRemoved.add(item);
}
// remove all children (recursively)
for (DataItem item : childrenToBeRemoved) {
delete(item);
}
// remove this folder (unless root)
DataFolder parent = folder.getParent();
if (parent != null) {
disconnectChild(folder, parent);
}
}
private String getNamePostfix(String featureName, String factoryName) {
if (factoryName.length() > featureName.length()) {
return "";
} else {
String npf = featureName.substring(factoryName.length());
if (npf.startsWith("/")) {
return npf.substring(1);
} else {
return npf;
}
}
}
public Iterable<File> listAllRepositories() {
LinkedList<File> repositories = new LinkedList<File>();
File tempRoot = getTempRoot();
for (File file: tempRoot.listFiles()) {
if (file.isDirectory() && file.getName().startsWith(TEMP_DIR_PREFIX)) {
String postfix = file.getName().substring(TEMP_DIR_PREFIX.length());
if ("".equals(postfix) || Strings.isIntegerNumber(postfix)) {
repositories.add(file);
}
}
}
return repositories;
}
public void flushSession() {
zipContentHandler.closeZipFiles();
}
public void setModules(LinkedList<Module> modules) {
this.modules = modules;
}
public void connectChild(DataItem child, DataFolder parent) {
// was it already connected?
boolean wasConnected = child.getParent() != null;
// connect to this
child.setParent(parent);
// add
parent.children.add(child);
// add type tags to data beans
if (child instanceof DataBean) {
try {
addTypeTags((DataBean) child);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
// dispatch events if needed
if (!wasConnected) {
dispatchEvent(new DataItemCreatedEvent(child));
}
}
public void disconnectChild(DataItem child, DataFolder parent) {
// remove connections
child.setParent(null);
// remove
parent.children.remove(child);
// dispatch events
dispatchEvent(new DataItemRemovedEvent(child));
}
public void addTypeTags(DataBean data) throws IOException {
for (Module module : modules) {
try {
module.addTypeTags(data);
} catch (MicroarrayException e) {
throw new RuntimeException(e);
}
}
}
/**
* Returns the handler instance of a given StorageMethod. Handler instances are DataManager specific.
*/
private ContentHandler getHandlerFor(StorageMethod method) {
switch (method) {
case LOCAL_SESSION:
return zipContentHandler;
case LOCAL_TEMP:
case LOCAL_USER:
return localFileContentHandler;
case REMOTE_CACHED:
case REMOTE_STORAGE:
return remoteContentHandler;
default:
throw new IllegalArgumentException("unrecognised method: " + method);
}
}
public void addUrl(DataBean bean, StorageMethod method, URL url) {
bean.addContentLocation(new ContentLocation(method, getHandlerFor(method), url));
}
public void putToStorage(DataBean dataBean) throws Exception {
// check if content is still available
if (dataBean.getContentLocations().size() == 0) {
return; // no content, nothing to put to storage
}
// check if already in storage
ContentLocation storageLocation = dataBean.getContentLocation(StorageMethod.REMOTE_STORAGE);
if (storageLocation != null && storageLocation.getHandler().isAccessible(storageLocation)) {
return;
}
// move from cache to storage, if in cache
for (ContentLocation cacheLocation : dataBean.getContentLocations(StorageMethod.REMOTE_CACHED)) {
if (cacheLocation != null && cacheLocation.getHandler().isAccessible(cacheLocation)) {
// move file in filebroker
URL storageURL = Session.getSession().getServiceAccessor().getFileBrokerClient().moveFileToStorage(cacheLocation.getUrl(), dataBean.getContentLength());
dataBean.addContentLocation(new ContentLocation(StorageMethod.REMOTE_STORAGE, getHandlerFor(StorageMethod.REMOTE_STORAGE), storageURL));
// remove cache location(s), because it is now obsolete
dataBean.removeContentLocations(StorageMethod.REMOTE_CACHED);
return;
}
}
// if not in cache, upload to storage
// move from elsewhere to storage
ContentLocation closestLocation = dataBean.getClosestContentLocation();
URL storageURL = Session.getSession().getServiceAccessor().getFileBrokerClient().addFile(FileBrokerArea.STORAGE, closestLocation.getHandler().getInputStream(closestLocation), closestLocation.getHandler().getContentLength(closestLocation), null);
dataBean.addContentLocation(new ContentLocation(StorageMethod.REMOTE_STORAGE, getHandlerFor(StorageMethod.REMOTE_STORAGE), storageURL));
}
/**
*
* @param bean
* @param progressListener
* @return null if no valid location available
* @throws NotEnoughDiskSpaceException
* @throws FileBrokerException
* @throws JMSException
* @throws IOException
* @throws Exception
*/
public URL getURLForCompAndUploadToCacheIfNeeded(DataBean bean, CopyProgressListener progressListener) throws NotEnoughDiskSpaceException, FileBrokerException, JMSException, IOException, Exception {
URL url = null;
try {
bean.getLock().readLock().lock();
// upload only if no valid storage or cached location is found
for (ContentLocation location : bean.getContentLocations(StorageMethod.REMOTE_CACHED, StorageMethod.REMOTE_STORAGE)) {
if (location.getHandler().isAccessible(location)) {
url = location.getUrl();
break;
}
}
// need to upload
if (url == null) {
url = Session.getSession().getServiceAccessor().getFileBrokerClient().addFile(FileBrokerArea.CACHE, bean.getContentStream(DataNotAvailableHandling.EXCEPTION_ON_NA), bean.getContentLength(), progressListener);
bean.removeContentLocations(StorageMethod.REMOTE_CACHED);
addUrl(bean, StorageMethod.REMOTE_CACHED, url);
}
} finally {
bean.getLock().readLock().unlock();
}
return url;
}
}
| src/main/java/fi/csc/microarray/databeans/DataManager.java | package fi.csc.microarray.databeans;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.jms.JMSException;
import javax.swing.Icon;
import org.apache.log4j.Logger;
import org.mortbay.util.IO;
import fi.csc.microarray.client.ClientApplication;
import fi.csc.microarray.client.Session;
import fi.csc.microarray.client.operation.OperationRecord;
import fi.csc.microarray.client.session.SessionLoader;
import fi.csc.microarray.client.session.SessionSaver;
import fi.csc.microarray.databeans.DataBean.ContentLocation;
import fi.csc.microarray.databeans.DataBean.DataNotAvailableHandling;
import fi.csc.microarray.databeans.DataBean.Link;
import fi.csc.microarray.databeans.features.Feature;
import fi.csc.microarray.databeans.features.FeatureProvider;
import fi.csc.microarray.databeans.features.Modifier;
import fi.csc.microarray.databeans.handlers.ContentHandler;
import fi.csc.microarray.databeans.handlers.LocalFileContentHandler;
import fi.csc.microarray.databeans.handlers.RemoteContentHandler;
import fi.csc.microarray.databeans.handlers.ZipContentHandler;
import fi.csc.microarray.exception.MicroarrayException;
import fi.csc.microarray.filebroker.FileBrokerClient.FileBrokerArea;
import fi.csc.microarray.filebroker.FileBrokerException;
import fi.csc.microarray.filebroker.NotEnoughDiskSpaceException;
import fi.csc.microarray.module.Module;
import fi.csc.microarray.util.IOUtils;
import fi.csc.microarray.util.IOUtils.CopyProgressListener;
import fi.csc.microarray.util.Strings;
public class DataManager {
public static enum StorageMethod {
LOCAL_USER(true, true),
LOCAL_TEMP(true, true),
LOCAL_SESSION(true, false),
REMOTE_CACHED(false, true),
REMOTE_STORAGE(false, true);
// Groups that describe how fast different methods are to access.
// Keep these up-to-date when you add methods!
public static StorageMethod[] LOCAL_FILE_METHODS = {LOCAL_USER, LOCAL_TEMP};
public static StorageMethod[] REMOTE_FILE_METHODS = {REMOTE_CACHED, REMOTE_STORAGE};
public static StorageMethod[] OTHER_SLOW_METHODS = {LOCAL_SESSION};
private boolean isLocal;
private boolean isRandomAccess;
StorageMethod(boolean isLocal, boolean isRandomAccess) {
this.isLocal = isLocal;
this.isRandomAccess = isRandomAccess;
}
public boolean isLocal() {
return isLocal;
}
public boolean isRandomAccess() {
return isRandomAccess;
}
}
private static final String TEMP_DIR_PREFIX = "chipster";
private static final int MAX_FILENAME_LENGTH = 256;
private static final Logger logger = Logger.getLogger(DataManager.class);
/**
* Reports session validation related problems.
*/
public static class ValidationException extends Exception {
public ValidationException(String validationDetails) {
super(validationDetails);
}
}
/**
* The initial name for the root folder.
*/
public final static String ROOT_NAME = "Datasets";
private Map<String, FeatureProvider> factories = new HashMap<String, FeatureProvider>();
private Map<String, Modifier> modifiers = new HashMap<String, Modifier>();
/** MIME types for the DataBeans */
private Map<String, ContentType> contentTypes = new HashMap<String, ContentType>();
/** Mapping file extensions to content types */
private Map<String, String> extensionMap = new HashMap<String, String>();
private LinkedList<DataChangeListener> listeners = new LinkedList<DataChangeListener>();
private boolean eventsEnabled = false;
private DataFolder rootFolder;
private File repositoryRoot;
private LinkedList<Module> modules;
private ZipContentHandler zipContentHandler = new ZipContentHandler();
private LocalFileContentHandler localFileContentHandler = new LocalFileContentHandler();
private RemoteContentHandler remoteContentHandler = new RemoteContentHandler();
public DataManager() throws Exception {
rootFolder = createFolder(DataManager.ROOT_NAME);
// initialize repository
repositoryRoot = createRepository();
}
public void setRootFolder(DataFolder folder) {
this.rootFolder = folder;
}
public File getRepository() {
return repositoryRoot;
}
/**
* Returns the root folder, acting as a gateway into the actual data
* content under this manager.
*/
public DataFolder getRootFolder() {
return rootFolder;
}
public boolean isRootFolder(DataFolder folder) {
return (rootFolder == folder) && (rootFolder != null);
}
/**
	 * Creates a folder under this manager. The folder is created without a parent.
*
* @param name name for the new folder
*/
public DataFolder createFolder(String name) {
DataFolder folder = new DataFolder(this, name);
return folder;
}
/**
* Creates a folder under this manager.
*
	 * @param root the folder under which the new folder is created
* @param name name for the new folder
*/
public DataFolder createFolder(DataFolder root, String name) {
DataFolder folder = new DataFolder(this, name);
connectChild(folder, root); // events are dispatched from here
return folder;
}
/**
* Adds a listener listening to changes in beans and folders of this manager.
*/
public void addDataChangeListener(DataChangeListener listener) {
logger.debug("adding DataChangeListener: " + listener);
if (listener == null) {
throw new IllegalArgumentException("listener cannot be null");
}
listeners.add(listener);
}
/**
	 * Creates a new empty file in the repository managed by this DataManager.
* All the files in this repository should be created by this method.
*
* The actual contents of the files may be added either by the
* createDataBean(..., InputStream) methods of this manager, or
* externally and then using the createDataBean(... File) methods
* to create the DataBean.
*
	 * This is needed to avoid overwriting data, in the case of
* duplicate DataBean names.
*
* @author Taavi Hupponen
*
	 * @param beanName name used as the basis for the file name
	 * @return the newly created empty file (scheduled for deletion on JVM exit)
* @throws IOException
*/
public synchronized File createNewRepositoryFile(String beanName) throws IOException {
String fileName = beanName.replaceAll("[^\\w\\.\\-_]", "");
if (fileName.length() < 1) {
fileName = "data";
} else if (fileName.length() > MAX_FILENAME_LENGTH) {
fileName = fileName.substring(0, MAX_FILENAME_LENGTH);
}
File file = new File(this.repositoryRoot, fileName);
// if file with the beanName already exists, add running number to the name
int indexOfDot = fileName.lastIndexOf(".");
String newFileName = "";
for (int i = 1; file.exists() && i < Integer.MAX_VALUE; i++) {
			// no dot: append the running number to the end
			if (indexOfDot < 0) {
newFileName = fileName + "-" + i;
}
// add before last dot
else {
newFileName = fileName.substring(0, indexOfDot) + "-" + i + fileName.substring(indexOfDot, fileName.length());
}
file = new File(this.repositoryRoot, newFileName);
}
// create the file
if (!file.createNewFile()) {
throw new IOException("Could not create file " + fileName);
}
// return the file
file.deleteOnExit();
return file;
}
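	/*
	 * Illustrative sketch (not part of the original source): the de-duplication above means
	 * repeated calls with the same bean name yield distinct files, e.g.
	 *
	 *   File f1 = manager.createNewRepositoryFile("results.tsv");  // results.tsv
	 *   File f2 = manager.createNewRepositoryFile("results.tsv");  // results-1.tsv
	 *
	 * where "manager" is a hypothetical DataManager instance.
	 */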
private File createRepository() throws IOException {
// get temp dir
File tempRoot = getTempRoot();
if (!tempRoot.canWrite()) {
// give up
throw new IOException("Could not create repository directory.");
}
String fileName = TEMP_DIR_PREFIX;
File repository = new File(tempRoot, fileName);
// if directory with that name already exists, add running number
boolean repositoryCreated = false;
for (int i = 1; !repositoryCreated && i < 1000; i++) {
repositoryCreated = repository.mkdir();
if (!repositoryCreated) {
repository = new File(tempRoot, fileName + "-" + i);
}
}
if (!repositoryCreated) {
throw new IOException("Could not create repository directory.");
}
repository.deleteOnExit();
return repository;
}
private File getTempRoot() {
File tempDir = new File(System.getProperty("java.io.tmpdir"));
// check if temp dir is writeable
if (!tempDir.canWrite()) {
// try home dir
tempDir = new File(System.getProperty("user.home"));
if (!tempDir.canWrite()) {
// try current working dir
tempDir = new File(System.getProperty("user.dir"));
}
}
return tempDir;
}
/**
	 * @param enabled if data change events should be dispatched
* @see #addDataChangeListener(DataChangeListener)
*/
public void setEventsEnabled(boolean enabled) {
this.eventsEnabled = enabled;
}
public void dispatchEventIfVisible(DataChangeEvent event) {
if (event.getDataItem().getParent() != null) {
dispatchEvent(event);
}
}
public void dispatchEvent(DataChangeEvent event) {
if (eventsEnabled) {
			// dispatch events only for connected data items
for (DataChangeListener listener : listeners) {
				if (listener == null) {
					logger.error("One of the DataChangeListeners was null, skipping it.");
					continue;
				}
				logger.debug("Notifying DataChangeListener " + listener.toString());
try {
listener.dataChanged(event);
} catch (RuntimeException e) {
					// we will not let GUI problems stop important DataBean manipulation operations
// and possibly lead to DataBean model corruption
logger.error("DataChangeEvent dispatch failed", e);
}
}
}
}
public static DataBean[] wrapSource(DataBean source) {
DataBean[] sources = null;
if (source != null) {
sources = new DataBean[1];
sources[0] = source;
} else {
sources = new DataBean[0];
}
return sources;
}
/**
* Guess the MIME content type using the filename.
*
* For now, simply use the extension to figure out the mime type.
*
* Types are plugged at ApplicationConstants.
*
*/
public ContentType guessContentType(String name) {
ContentType type = null;
if(name.lastIndexOf(".") != -1){
String extension = name.substring(name.lastIndexOf(".") + 1, name.length()).toLowerCase();
String typeName = extensionMap.get(extension);
if (typeName != null) {
type = contentTypes.get(typeName);
}
}
if (type == null) {
type = contentTypes.get("application/octet-stream");
}
return type;
}
/**
	 * Guesses the MIME content type from a filename; file contents are not currently inspected.
*/
public ContentType guessContentType(File file) {
return guessContentType(file.getName());
}
/**
	 * @return MIME content type for the given MIME type name
*/
public ContentType getContentType(String typeName) {
return contentTypes.get(typeName);
}
/**
* Plugs a MIME content type, so that it can be used in all beans under this manager.
*
	 * @param mimeType MIME name
	 * @param supported is this a known (directly supported) content type?
	 * @param binary is the content of this type binary rather than plain text?
	 * @param description a short textual description
	 * @param icon icon used to represent this content type
	 * @param extensions file extensions belonging to this type
*/
public void plugContentType(String mimeType, boolean supported, boolean binary, String description, Icon icon, String... extensions) {
// create the content type
contentTypes.put(mimeType, new ContentType(mimeType, supported, binary, description, icon, extensions));
// add extensions to search map
for (String extension: extensions) {
extensionMap.put(extension, mimeType);
}
}
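	/*
	 * Minimal usage sketch (hypothetical values, not from the original source): a module could
	 * register a tab-separated text type so that guessContentType("data.tsv") resolves to it.
	 *
	 *   manager.plugContentType("text/tab", true, false, "Tab-separated values", null, "tsv");
	 *
	 * The MIME name, description and extension above are example values only.
	 */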
/**
* Plugs a modifier (part of Feature API), so that it can be used in all beans under this manager.
*/
public void plugModifier(String name, Modifier modifier) {
modifiers.put(name, modifier);
}
/**
* Plugs a feature factory, so that it can be used in all beans under this manager.
*/
public void plugFeatureFactory(String name, FeatureProvider plugin) {
logger.debug("plugged " + plugin.getClass().getSimpleName() + " at " + name);
plugin.setName(name);
factories.put(name, plugin);
}
public Modifier fetchModifier(String modifierName) {
return modifiers.get(modifierName);
}
public Feature fetchFeature(String featureName, DataBean bean) {
String bestMatch = null;
for (String feature : factories.keySet()) {
if (featureName.startsWith(feature)) {
if (bestMatch == null || feature.length() > bestMatch.length()) {
// current best match
bestMatch = feature;
}
}
}
FeatureProvider factory = factories.get(bestMatch);
if (factory == null) {
throw new RuntimeException("no feature factory plugged in for \"" + featureName + "\" (total of " + factories.size() + " factories plugged)");
}
		logger.debug("best match for " + featureName + " was " + factory.getName());
String namePostfix = getNamePostfix(featureName, factory.getName());
return factory.createFeature(namePostfix, bean);
}
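	/*
	 * Worked example of the longest-prefix matching above (hypothetical feature names, not from
	 * the original source): with factories plugged at "/column" and "/column/stats", a request
	 * for "/column/stats/mean" selects "/column/stats", and getNamePostfix(...) passes the
	 * remaining postfix "mean" to createFeature(...).
	 */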
/**
* Find and return the first DataItem with the given name.
	 * @param name the name of the DataItem being searched for
	 * @return the first DataItem found with the given name
*/
public DataItem findDataItem(String name) {
return findDataItem(name, getRootFolder());
}
private DataItem findDataItem(String name, DataItem root) {
DataItem matchingItem = null;
// root item matches
if (root.getName().equals(name)) {
return root;
}
// root is a folder, search children
else if (root instanceof DataFolder) {
for (DataItem child: ((DataFolder)root).getChildren()) {
matchingItem = findDataItem(name, child);
if (matchingItem != null) {
return matchingItem;
}
}
}
// no match found
return null;
}
/**
* Find and return the first DataBean with the given name.
	 * @param name the name of the DataBean being searched for
	 * @return the first DataBean found with the given name
*/
public DataBean getDataBean(String name) {
for (DataBean dataBean : databeans()) {
if (dataBean.getName().equals(name)) {
return dataBean;
}
}
return null;
}
/**
	 * Creates a local temporary file DataBean without content, without a parent folder and without sources.
	 * If the reference to this bean is lost, it can no longer be accessed.
*/
public DataBean createLocalTempDataBean(String name) throws MicroarrayException {
try {
File contentFile = createNewRepositoryFile(name);
DataBean bean = createDataBean(name);
addUrl(bean, StorageMethod.LOCAL_TEMP, contentFile.toURI().toURL());
return bean;
} catch (IOException e) {
throw new MicroarrayException(e);
}
}
/**
* Creates new DataBean. Infers content type of the created DataBean from the name.
*
* @param name name of the DataBean
* @return new DataBean that is not connected to a DataFolder
*/
public DataBean createDataBean(String name) throws MicroarrayException {
DataBean data = new DataBean(name, guessContentType(name), this);
return data;
}
/**
* Convenience method for creating a local file DataBean. Initialises the DataBean with local file
* location. The file is used directly, the contents are not copied anywhere.
*
*/
public DataBean createDataBean(String name, File contentFile) throws MicroarrayException {
try {
DataBean bean = createDataBean(name);
addUrl(bean, StorageMethod.LOCAL_USER, contentFile.toURI().toURL());
return bean;
} catch (IOException e) {
throw new MicroarrayException(e);
}
}
/**
* Convenience method for creating a local temporary file DataBean with content.
* Content stream is read into a temp file and location of the file is stored
* to DataBean.
*/
public DataBean createDataBean(String name, InputStream content) throws MicroarrayException {
// copy the data from the input stream to the file in repository
File contentFile;
try {
contentFile = createNewRepositoryFile(name);
InputStream input = new BufferedInputStream(content);
OutputStream output = new BufferedOutputStream(new FileOutputStream(contentFile));
IO.copy(input, output);
input.close();
output.flush();
output.close();
} catch (IOException ioe) {
throw new MicroarrayException(ioe);
}
// create and return the bean
DataBean bean = createDataBean(name);
try {
addUrl(bean, StorageMethod.LOCAL_TEMP, contentFile.toURI().toURL());
} catch (MalformedURLException e) {
throw new MicroarrayException(e);
}
return bean;
}
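	/*
	 * Usage sketch (hypothetical names, not part of the original source): creating a bean from a
	 * stream and attaching it to the data model so that creation events are dispatched.
	 *
	 *   try (InputStream in = new FileInputStream("counts.tsv")) {
	 *       DataBean bean = manager.createDataBean("counts.tsv", in);
	 *       manager.connectChild(bean, manager.getRootFolder());
	 *   }
	 *
	 * "manager" denotes a DataManager instance; the file name is an example only.
	 */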
/**
* Load session from a file.
*
	 * @see #saveSession(File)
*/
public void loadSession(File sessionFile, boolean isDataless) throws Exception {
SessionLoader sessionLoader = new SessionLoader(sessionFile, isDataless, this);
sessionLoader.loadSession();
}
/**
* Saves session (all data: beans, folder structure, operation metadata, links etc.) to a file.
	 * The file is a zip file containing all the data files and one metadata file.
	 *
	 * @throws ValidationException if the file was written but its metadata did not validate
	 * @throws Exception if saving the session fails
*/
public void saveSession(File sessionFile) throws Exception {
// save session file
boolean metadataValid = false;
SessionSaver sessionSaver = new SessionSaver(sessionFile, this);
metadataValid = sessionSaver.saveSession();
// check validation
if (!metadataValid) {
// save was successful but metadata validation failed, file might be usable
String validationDetails = sessionSaver.getValidationErrors();
throw new ValidationException(validationDetails);
}
}
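	/*
	 * Round-trip sketch (not from the original source): a ValidationException means the zip file
	 * was written but its metadata did not validate, so a caller may choose to warn instead of abort.
	 *
	 *   try {
	 *       manager.saveSession(new File("analysis.zip"));   // hypothetical path
	 *   } catch (ValidationException e) {
	 *       // file exists and may still be loadable
	 *   }
	 *   manager.loadSession(new File("analysis.zip"), false);
	 */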
/**
* Saves lightweight session (folder structure, operation metadata, links etc.) to a file.
* Does not save actual data inside databeans.
*
	 * @throws Exception if saving the session fails
*/
public void saveLightweightSession(File sessionFile) throws Exception {
SessionSaver sessionSaver = new SessionSaver(sessionFile, this);
sessionSaver.saveLightweightSession();
}
/**
* Returns debug print out of current session state.
*
* @return print out of session state
*/
public String printSession() {
StringBuffer buffer = new StringBuffer();
SessionSaver.dumpSession(rootFolder, buffer);
return buffer.toString();
}
public void saveStorageSession(File sessionFile) throws Exception {
SessionSaver sessionSaver = new SessionSaver(sessionFile, this);
sessionSaver.saveStorageSession();
}
/**
* Delete DataItem and its children (if any). Root folder cannot be removed.
*
* @param data item to be deleted
*/
public void delete(DataItem data) {
if (data instanceof DataFolder) {
deleteDataFolder((DataFolder)data);
} else {
deleteDataBean((DataBean)data);
}
}
/**
* Remove all DataBeans and DataFolders, except for the root folder.
*/
public void deleteAllDataItems() {
deleteDataFolder(getRootFolder());
}
private void deleteDataBean(DataBean bean) {
// remove from operation history
for (DataBean source : databeans()) {
			// we must iterate over all data beans because links cannot be trusted (they might have been removed by the user)
OperationRecord operationRecord = source.getOperationRecord();
if (operationRecord != null) {
operationRecord.removeInput(bean);
}
}
// remove links
for (Link linkType : Link.values()) {
// Remove outgoing links
for (DataBean target : bean.getLinkTargets(linkType)) {
bean.removeLink(linkType, target);
}
// Remove incoming links
for (DataBean source : bean.getLinkSources(linkType)) {
source.removeLink(linkType, bean);
}
}
// remove bean
DataFolder folder = bean.getParent();
if (folder != null) {
disconnectChild(bean, folder);
}
// remove physical file
bean.delete();
}
/**
* Return all DataBeans under this manager.
*/
public List<DataBean> databeans() {
LinkedList<DataBean> databeans = new LinkedList<DataBean>();
for (DataFolder folder : folders()) {
for (DataItem child : folder.getChildren()) {
if (child instanceof DataBean) {
databeans.add((DataBean) child);
}
}
}
return databeans;
}
/**
* Return all DataFolders under this manager.
*/
public List<DataFolder> folders() {
return folders(getRootFolder());
}
public List<DataFolder> folders(DataFolder parent) {
LinkedList<DataFolder> folders = new LinkedList<DataFolder>();
folders.add(parent);
for (DataItem child : parent.getChildren()) {
if (child instanceof DataFolder) {
folders.addAll(folders((DataFolder) child));
}
}
return folders;
}
public OutputStream getContentOutputStreamAndLockDataBean(DataBean bean) throws IOException {
// only local temp beans support output, so convert to local temp bean if needed
ContentLocation tempLocalLocation = bean.getContentLocation(StorageMethod.LOCAL_TEMP);
if (tempLocalLocation == null) {
this.convertToLocalTempDataBean(bean);
tempLocalLocation = bean.getContentLocation(StorageMethod.LOCAL_TEMP);
}
// remove all other locations, as they will become obsolete when OutputStream is written to
while (bean.getContentLocations().size() > 1) {
for (ContentLocation location : bean.getContentLocations()) {
if (location != tempLocalLocation) {
bean.removeContentLocation(location);
break; // remove outside of the iterator, cannot continue
}
}
}
return tempLocalLocation.getHandler().getOutputStream(tempLocalLocation);
}
public void closeContentOutputStreamAndUnlockDataBean(DataBean bean, OutputStream out)
throws MicroarrayException, IOException {
try {
out.close();
} finally {
// this.lock.writeLock().unlock();
}
ContentChangedEvent cce = new ContentChangedEvent(bean);
this.dispatchEventIfVisible(cce);
}
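	/*
	 * Write-path sketch (not part of the original source): content is written through the local
	 * temp location prepared above, and the paired close call publishes a ContentChangedEvent.
	 *
	 *   OutputStream out = manager.getContentOutputStreamAndLockDataBean(bean);
	 *   try {
	 *       out.write(bytes);                                 // "bytes" is a hypothetical payload
	 *   } finally {
	 *       manager.closeContentOutputStreamAndUnlockDataBean(bean, out);
	 *   }
	 */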
public File getLocalFile(DataBean bean) throws IOException {
ContentLocation location = bean.getContentLocation(StorageMethod.LOCAL_FILE_METHODS);
// convert non local file beans to local file beans
if (location == null) {
this.convertToLocalTempDataBean(bean);
location = bean.getContentLocation(StorageMethod.LOCAL_FILE_METHODS);
}
// get the file
LocalFileContentHandler handler = (LocalFileContentHandler) location.getHandler();
return handler.getFile(location);
}
private void convertToLocalTempDataBean(DataBean bean) throws IOException {
// copy contents to new file
File newFile = this.createNewRepositoryFile(bean.getName());
BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(newFile));
BufferedInputStream in = new BufferedInputStream(bean.getContentStream(DataNotAvailableHandling.EXCEPTION_ON_NA));
try {
IOUtils.copy(in, out);
} finally {
IOUtils.closeIfPossible(in);
IOUtils.closeIfPossible(out);
}
// update url, type and handler in the bean
URL newURL = newFile.toURI().toURL();
addUrl(bean, StorageMethod.LOCAL_TEMP, newURL);
}
private void deleteDataFolder(DataFolder folder) {
// remove children
Iterable<DataItem> children = folder.getChildren();
// make a copy of the children list to avoid concurrent modification
List<DataItem> childrenToBeRemoved = new LinkedList<DataItem>();
for (DataItem item : children) {
childrenToBeRemoved.add(item);
}
// remove all children (recursively)
for (DataItem item : childrenToBeRemoved) {
delete(item);
}
// remove this folder (unless root)
DataFolder parent = folder.getParent();
if (parent != null) {
disconnectChild(folder, parent);
}
}
private String getNamePostfix(String featureName, String factoryName) {
if (factoryName.length() > featureName.length()) {
return "";
} else {
String npf = featureName.substring(factoryName.length());
if (npf.startsWith("/")) {
return npf.substring(1);
} else {
return npf;
}
}
}
public Iterable<File> listAllRepositories() {
LinkedList<File> repositories = new LinkedList<File>();
File tempRoot = getTempRoot();
for (File file: tempRoot.listFiles()) {
if (file.isDirectory() && file.getName().startsWith(TEMP_DIR_PREFIX)) {
String postfix = file.getName().substring(TEMP_DIR_PREFIX.length());
if ("".equals(postfix) || Strings.isIntegerNumber(postfix)) {
repositories.add(file);
}
}
}
return repositories;
}
public void flushSession() {
zipContentHandler.closeZipFiles();
}
public void setModules(LinkedList<Module> modules) {
this.modules = modules;
}
public void connectChild(DataItem child, DataFolder parent) {
// was it already connected?
boolean wasConnected = child.getParent() != null;
// connect to this
child.setParent(parent);
// add
parent.children.add(child);
// add type tags to data beans
if (child instanceof DataBean) {
try {
addTypeTags((DataBean) child);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
// dispatch events if needed
if (!wasConnected) {
dispatchEvent(new DataItemCreatedEvent(child));
}
}
public void disconnectChild(DataItem child, DataFolder parent) {
// remove connections
child.setParent(null);
// remove
parent.children.remove(child);
// dispatch events
dispatchEvent(new DataItemRemovedEvent(child));
}
public void addTypeTags(DataBean data) throws IOException {
for (Module module : modules) {
try {
module.addTypeTags(data);
} catch (MicroarrayException e) {
throw new RuntimeException(e);
}
}
}
/**
* Returns the handler instance of a given StorageMethod. Handler instances are DataManager specific.
*/
private ContentHandler getHandlerFor(StorageMethod method) {
switch (method) {
case LOCAL_SESSION:
return zipContentHandler;
case LOCAL_TEMP:
case LOCAL_USER:
return localFileContentHandler;
case REMOTE_CACHED:
case REMOTE_STORAGE:
return remoteContentHandler;
default:
throw new IllegalArgumentException("unrecognised method: " + method);
}
}
public void addUrl(DataBean bean, StorageMethod method, URL url) {
bean.addContentLocation(new ContentLocation(method, getHandlerFor(method), url));
}
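	/*
	 * Sketch (hypothetical values, not from the original source): attaching an existing local file
	 * to a bean goes through the handler chosen by getHandlerFor(StorageMethod.LOCAL_USER).
	 *
	 *   manager.addUrl(bean, StorageMethod.LOCAL_USER, new File("/tmp/example.bed").toURI().toURL());
	 */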
public void putToStorage(DataBean dataBean) throws Exception {
// check if content is still available
if (dataBean.getContentLocations().size() == 0) {
return; // no content, nothing to put to storage
}
// check if already in storage
ContentLocation storageLocation = dataBean.getContentLocation(StorageMethod.REMOTE_STORAGE);
if (storageLocation != null && storageLocation.getHandler().isAccessible(storageLocation)) {
return;
}
// move from cache to storage, if in cache
for (ContentLocation cacheLocation : dataBean.getContentLocations(StorageMethod.REMOTE_CACHED)) {
if (cacheLocation != null && cacheLocation.getHandler().isAccessible(cacheLocation)) {
// move file in filebroker
URL storageURL = Session.getSession().getServiceAccessor().getFileBrokerClient().moveFileToStorage(cacheLocation.getUrl(), dataBean.getContentLength());
dataBean.addContentLocation(new ContentLocation(StorageMethod.REMOTE_STORAGE, getHandlerFor(StorageMethod.REMOTE_STORAGE), storageURL));
				// remove cache location(s), because they are now obsolete
dataBean.removeContentLocations(StorageMethod.REMOTE_CACHED);
return;
}
}
// if not in cache, upload to storage
// move from elsewhere to storage
ContentLocation closestLocation = dataBean.getClosestContentLocation();
Session.getSession().getServiceAccessor().getFileBrokerClient().addFile(FileBrokerArea.STORAGE, closestLocation.getHandler().getInputStream(closestLocation), closestLocation.getHandler().getContentLength(closestLocation), null);
}
/**
*
	 * @param bean the bean whose content is needed on the file broker
	 * @param progressListener listener notified of copy progress
* @return null if no valid location available
* @throws NotEnoughDiskSpaceException
* @throws FileBrokerException
* @throws JMSException
* @throws IOException
* @throws Exception
*/
public URL getURLForCompAndUploadToCacheIfNeeded(DataBean bean, CopyProgressListener progressListener) throws NotEnoughDiskSpaceException, FileBrokerException, JMSException, IOException, Exception {
URL url = null;
try {
bean.getLock().readLock().lock();
// upload only if no valid storage or cached location is found
for (ContentLocation location : bean.getContentLocations(StorageMethod.REMOTE_CACHED, StorageMethod.REMOTE_STORAGE)) {
if (location.getHandler().isAccessible(location)) {
url = location.getUrl();
break;
}
}
// need to upload
if (url == null) {
url = Session.getSession().getServiceAccessor().getFileBrokerClient().addFile(FileBrokerArea.CACHE, bean.getContentStream(DataNotAvailableHandling.EXCEPTION_ON_NA), bean.getContentLength(), progressListener);
bean.removeContentLocations(StorageMethod.REMOTE_CACHED);
addUrl(bean, StorageMethod.REMOTE_CACHED, url);
}
} finally {
bean.getLock().readLock().unlock();
}
return url;
}
}
| fixed bug in URL bookkeeping
| src/main/java/fi/csc/microarray/databeans/DataManager.java | fixed bug in URL bookkeeping | <ide><path>rc/main/java/fi/csc/microarray/databeans/DataManager.java
<ide>
<ide> // move from elsewhere to storage
<ide> ContentLocation closestLocation = dataBean.getClosestContentLocation();
<del> Session.getSession().getServiceAccessor().getFileBrokerClient().addFile(FileBrokerArea.STORAGE, closestLocation.getHandler().getInputStream(closestLocation), closestLocation.getHandler().getContentLength(closestLocation), null);
<del>
<add> URL storageURL = Session.getSession().getServiceAccessor().getFileBrokerClient().addFile(FileBrokerArea.STORAGE, closestLocation.getHandler().getInputStream(closestLocation), closestLocation.getHandler().getContentLength(closestLocation), null);
<add> dataBean.addContentLocation(new ContentLocation(StorageMethod.REMOTE_STORAGE, getHandlerFor(StorageMethod.REMOTE_STORAGE), storageURL));
<ide> }
<ide>
<ide> /** |
|
Java | apache-2.0 | f2fe7f335c3b55cb2a2f9bd4cada56dc2ad35633 | 0 | spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework | /*
* Copyright 2002-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.context.annotation;
import java.util.Map;
import java.util.regex.Pattern;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.NoSuchBeanDefinitionException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.BeanPostProcessor;
import org.springframework.beans.factory.support.RootBeanDefinition;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation6.ComponentForScanning;
import org.springframework.context.annotation6.ConfigForScanning;
import org.springframework.context.annotation6.Jsr330NamedForScanning;
import org.springframework.core.ResolvableType;
import org.springframework.util.ObjectUtils;
import static java.lang.String.format;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.springframework.util.StringUtils.uncapitalize;
/**
* @author Chris Beams
* @author Juergen Hoeller
*/
@SuppressWarnings("resource")
class AnnotationConfigApplicationContextTests {
@Test
void scanAndRefresh() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.scan("org.springframework.context.annotation6");
context.refresh();
context.getBean(uncapitalize(ConfigForScanning.class.getSimpleName()));
context.getBean("testBean"); // contributed by ConfigForScanning
context.getBean(uncapitalize(ComponentForScanning.class.getSimpleName()));
context.getBean(uncapitalize(Jsr330NamedForScanning.class.getSimpleName()));
Map<String, Object> beans = context.getBeansWithAnnotation(Configuration.class);
assertThat(beans).hasSize(1);
}
@Test
void registerAndRefresh() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.register(Config.class, NameConfig.class);
context.refresh();
context.getBean("testBean");
context.getBean("name");
Map<String, Object> beans = context.getBeansWithAnnotation(Configuration.class);
assertThat(beans).hasSize(2);
}
@Test
void getBeansWithAnnotation() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.register(Config.class, NameConfig.class, UntypedFactoryBean.class);
context.refresh();
context.getBean("testBean");
context.getBean("name");
Map<String, Object> beans = context.getBeansWithAnnotation(Configuration.class);
assertThat(beans).hasSize(2);
}
@Test
void getBeanByType() {
ApplicationContext context = new AnnotationConfigApplicationContext(Config.class);
TestBean testBean = context.getBean(TestBean.class);
assertThat(testBean).isNotNull();
assertThat(testBean.name).isEqualTo("foo");
}
@Test
void getBeanByTypeRaisesNoSuchBeanDefinitionException() {
ApplicationContext context = new AnnotationConfigApplicationContext(Config.class);
// attempt to retrieve a bean that does not exist
Class<?> targetType = Pattern.class;
assertThatExceptionOfType(NoSuchBeanDefinitionException.class).isThrownBy(() ->
context.getBean(targetType))
.withMessageContaining(format("No qualifying bean of type '%s'", targetType.getName()));
}
@Test
void getBeanByTypeAmbiguityRaisesException() {
ApplicationContext context = new AnnotationConfigApplicationContext(TwoTestBeanConfig.class);
assertThatExceptionOfType(NoSuchBeanDefinitionException.class).isThrownBy(() ->
context.getBean(TestBean.class))
.withMessageContaining("No qualifying bean of type '" + TestBean.class.getName() + "'")
.withMessageContaining("tb1")
.withMessageContaining("tb2");
}
/**
* Tests that Configuration classes are registered according to convention
* @see org.springframework.beans.factory.support.DefaultBeanNameGenerator#generateBeanName
*/
@Test
void defaultConfigClassBeanNameIsGeneratedProperly() {
ApplicationContext context = new AnnotationConfigApplicationContext(Config.class);
// attempt to retrieve the instance by its generated bean name
Config configObject = (Config) context.getBean("annotationConfigApplicationContextTests.Config");
assertThat(configObject).isNotNull();
}
/**
* Tests that specifying @Configuration(value="foo") results in registering
* the configuration class with bean name 'foo'.
*/
@Test
void explicitConfigClassBeanNameIsRespected() {
ApplicationContext context = new AnnotationConfigApplicationContext(ConfigWithCustomName.class);
// attempt to retrieve the instance by its specified name
ConfigWithCustomName configObject = (ConfigWithCustomName) context.getBean("customConfigBeanName");
assertThat(configObject).isNotNull();
}
@Test
void autowiringIsEnabledByDefault() {
ApplicationContext context = new AnnotationConfigApplicationContext(AutowiredConfig.class);
assertThat(context.getBean(TestBean.class).name).isEqualTo("foo");
}
@Test
void nullReturningBeanPostProcessor() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.register(AutowiredConfig.class);
// 1st BPP always gets invoked
context.getBeanFactory().addBeanPostProcessor(new BeanPostProcessor() {
@Override
public Object postProcessBeforeInitialization(Object bean, String beanName) {
if (bean instanceof TestBean) {
TestBean testBean = (TestBean) bean;
testBean.name = testBean.name + "-before";
}
return bean;
}
@Override
public Object postProcessAfterInitialization(Object bean, String beanName) {
if (bean instanceof TestBean) {
TestBean testBean = (TestBean) bean;
testBean.name = testBean.name + "-after";
}
return bean;
}
});
// 2nd BPP always returns null for a TestBean
context.getBeanFactory().addBeanPostProcessor(new BeanPostProcessor() {
@Override
public Object postProcessBeforeInitialization(Object bean, String beanName) {
return (bean instanceof TestBean ? null : bean);
}
@Override
public Object postProcessAfterInitialization(Object bean, String beanName) {
return (bean instanceof TestBean ? null : bean);
}
});
// 3rd BPP never gets invoked with a TestBean
context.getBeanFactory().addBeanPostProcessor(new BeanPostProcessor() {
@Override
public Object postProcessBeforeInitialization(Object bean, String beanName) {
assertThat(bean).isNotInstanceOf(TestBean.class);
return bean;
}
@Override
public Object postProcessAfterInitialization(Object bean, String beanName) {
assertThat(bean).isNotInstanceOf(TestBean.class);
return bean;
}
});
context.refresh();
TestBean testBean = context.getBean(TestBean.class);
assertThat(testBean).isNotNull();
assertThat(testBean.name).isEqualTo("foo-before-after");
}
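	// Side note (sketch, not part of the original test): the BeanPostProcessor contract is that a
	// null return value short-circuits the remaining post-processors for that bean, e.g.
	//
	//     public Object postProcessBeforeInitialization(Object bean, String beanName) {
	//         return (bean instanceof TestBean ? null : bean);  // null => stop the chain
	//     }
	//
	// which is why the third processor above never observes a TestBean and the final name is
	// "foo-before-after".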
@Test
void individualBeans() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.register(BeanA.class, BeanB.class, BeanC.class);
context.refresh();
assertThat(context.getBean(BeanA.class).b).isSameAs(context.getBean(BeanB.class));
assertThat(context.getBean(BeanA.class).c).isSameAs(context.getBean(BeanC.class));
assertThat(context.getBean(BeanB.class).applicationContext).isSameAs(context);
}
@Test
void individualNamedBeans() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.registerBean("a", BeanA.class);
context.registerBean("b", BeanB.class);
context.registerBean("c", BeanC.class);
context.refresh();
assertThat(context.getBean("a", BeanA.class).b).isSameAs(context.getBean("b"));
assertThat(context.getBean("a", BeanA.class).c).isSameAs(context.getBean("c"));
assertThat(context.getBean("b", BeanB.class).applicationContext).isSameAs(context);
}
@Test
void individualBeanWithSupplier() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.registerBean(BeanA.class,
() -> new BeanA(context.getBean(BeanB.class), context.getBean(BeanC.class)));
context.registerBean(BeanB.class, BeanB::new);
context.registerBean(BeanC.class, BeanC::new);
context.refresh();
assertThat(context.getBeanFactory().containsSingleton("annotationConfigApplicationContextTests.BeanA")).isTrue();
assertThat(context.getBean(BeanA.class).b).isSameAs(context.getBean(BeanB.class));
assertThat(context.getBean(BeanA.class).c).isSameAs(context.getBean(BeanC.class));
assertThat(context.getBean(BeanB.class).applicationContext).isSameAs(context);
assertThat(context.getDefaultListableBeanFactory().getDependentBeans("annotationConfigApplicationContextTests.BeanB"))
.containsExactly("annotationConfigApplicationContextTests.BeanA");
assertThat(context.getDefaultListableBeanFactory().getDependentBeans("annotationConfigApplicationContextTests.BeanC"))
.containsExactly("annotationConfigApplicationContextTests.BeanA");
}
@Test
void individualBeanWithSupplierAndCustomizer() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.registerBean(BeanA.class,
() -> new BeanA(context.getBean(BeanB.class), context.getBean(BeanC.class)),
bd -> bd.setLazyInit(true));
context.registerBean(BeanB.class, BeanB::new);
context.registerBean(BeanC.class, BeanC::new);
context.refresh();
assertThat(context.getBeanFactory().containsSingleton("annotationConfigApplicationContextTests.BeanA")).isFalse();
assertThat(context.getBean(BeanA.class).b).isSameAs(context.getBean(BeanB.class));
assertThat(context.getBean(BeanA.class).c).isSameAs(context.getBean(BeanC.class));
assertThat(context.getBean(BeanB.class).applicationContext).isSameAs(context);
}
@Test
void individualNamedBeanWithSupplier() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.registerBean("a", BeanA.class,
() -> new BeanA(context.getBean(BeanB.class), context.getBean(BeanC.class)));
context.registerBean("b", BeanB.class, BeanB::new);
context.registerBean("c", BeanC.class, BeanC::new);
context.refresh();
assertThat(context.getBeanFactory().containsSingleton("a")).isTrue();
assertThat(context.getBean(BeanA.class).b).isSameAs(context.getBean("b", BeanB.class));
assertThat(context.getBean("a", BeanA.class).c).isSameAs(context.getBean("c"));
assertThat(context.getBean("b", BeanB.class).applicationContext).isSameAs(context);
}
@Test
void individualNamedBeanWithSupplierAndCustomizer() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.registerBean("a", BeanA.class,
() -> new BeanA(context.getBean(BeanB.class), context.getBean(BeanC.class)),
bd -> bd.setLazyInit(true));
context.registerBean("b", BeanB.class, BeanB::new);
context.registerBean("c", BeanC.class, BeanC::new);
context.refresh();
assertThat(context.getBeanFactory().containsSingleton("a")).isFalse();
assertThat(context.getBean(BeanA.class).b).isSameAs(context.getBean("b", BeanB.class));
assertThat(context.getBean("a", BeanA.class).c).isSameAs(context.getBean("c"));
assertThat(context.getBean("b", BeanB.class).applicationContext).isSameAs(context);
}
@Test
void individualBeanWithNullReturningSupplier() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.registerBean("a", BeanA.class, () -> null);
context.registerBean("b", BeanB.class, BeanB::new);
context.registerBean("c", BeanC.class, BeanC::new);
context.refresh();
assertThat(ObjectUtils.containsElement(context.getBeanNamesForType(BeanA.class), "a")).isTrue();
assertThat(ObjectUtils.containsElement(context.getBeanNamesForType(BeanB.class), "b")).isTrue();
assertThat(ObjectUtils.containsElement(context.getBeanNamesForType(BeanC.class), "c")).isTrue();
assertThat(context.getBeansOfType(BeanA.class)).isEmpty();
assertThat(context.getBeansOfType(BeanB.class).values().iterator().next()).isSameAs(context.getBean(BeanB.class));
assertThat(context.getBeansOfType(BeanC.class).values().iterator().next()).isSameAs(context.getBean(BeanC.class));
assertThatExceptionOfType(NoSuchBeanDefinitionException.class).isThrownBy(() ->
context.getBeanFactory().resolveNamedBean(BeanA.class));
assertThat(context.getBeanFactory().resolveNamedBean(BeanB.class).getBeanInstance()).isSameAs(context.getBean(BeanB.class));
assertThat(context.getBeanFactory().resolveNamedBean(BeanC.class).getBeanInstance()).isSameAs(context.getBean(BeanC.class));
}
@Test
void individualBeanWithSpecifiedConstructorArguments() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
BeanB b = new BeanB();
BeanC c = new BeanC();
context.registerBean(BeanA.class, b, c);
context.refresh();
assertThat(context.getBean(BeanA.class).b).isSameAs(b);
assertThat(context.getBean(BeanA.class).c).isSameAs(c);
assertThat(b.applicationContext).isNull();
}
@Test
void individualNamedBeanWithSpecifiedConstructorArguments() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
BeanB b = new BeanB();
BeanC c = new BeanC();
context.registerBean("a", BeanA.class, b, c);
context.refresh();
assertThat(context.getBean("a", BeanA.class).b).isSameAs(b);
assertThat(context.getBean("a", BeanA.class).c).isSameAs(c);
assertThat(b.applicationContext).isNull();
}
@Test
void individualBeanWithMixedConstructorArguments() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
BeanC c = new BeanC();
context.registerBean(BeanA.class, c);
context.registerBean(BeanB.class);
context.refresh();
assertThat(context.getBean(BeanA.class).b).isSameAs(context.getBean(BeanB.class));
assertThat(context.getBean(BeanA.class).c).isSameAs(c);
assertThat(context.getBean(BeanB.class).applicationContext).isSameAs(context);
}
@Test
void individualNamedBeanWithMixedConstructorArguments() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
BeanC c = new BeanC();
context.registerBean("a", BeanA.class, c);
context.registerBean("b", BeanB.class);
context.refresh();
assertThat(context.getBean("a", BeanA.class).b).isSameAs(context.getBean("b", BeanB.class));
assertThat(context.getBean("a", BeanA.class).c).isSameAs(c);
assertThat(context.getBean("b", BeanB.class).applicationContext).isSameAs(context);
}
@Test
void individualBeanWithFactoryBeanSupplier() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.registerBean("fb", NonInstantiatedFactoryBean.class, NonInstantiatedFactoryBean::new, bd -> bd.setLazyInit(true));
context.refresh();
assertThat(context.getType("fb")).isEqualTo(String.class);
assertThat(context.getType("&fb")).isEqualTo(NonInstantiatedFactoryBean.class);
assertThat(context.getBeanNamesForType(FactoryBean.class)).hasSize(1);
assertThat(context.getBeanNamesForType(NonInstantiatedFactoryBean.class)).hasSize(1);
}
@Test
void individualBeanWithFactoryBeanSupplierAndTargetType() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
RootBeanDefinition bd = new RootBeanDefinition();
bd.setInstanceSupplier(NonInstantiatedFactoryBean::new);
bd.setTargetType(ResolvableType.forClassWithGenerics(FactoryBean.class, String.class));
bd.setLazyInit(true);
context.registerBeanDefinition("fb", bd);
context.refresh();
assertThat(context.getType("fb")).isEqualTo(String.class);
assertThat(context.getType("&fb")).isEqualTo(FactoryBean.class);
assertThat(context.getBeanNamesForType(FactoryBean.class)).hasSize(1);
assertThat(context.getBeanNamesForType(NonInstantiatedFactoryBean.class)).isEmpty();
}
@Test
void individualBeanWithFactoryBeanObjectTypeAsTargetType() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
RootBeanDefinition bd = new RootBeanDefinition();
bd.setBeanClass(TypedFactoryBean.class);
bd.setTargetType(String.class);
context.registerBeanDefinition("fb", bd);
context.refresh();
assertThat(context.getType("&fb")).isEqualTo(TypedFactoryBean.class);
assertThat(context.getType("fb")).isEqualTo(String.class);
assertThat(context.getBeanNamesForType(FactoryBean.class)).hasSize(1);
assertThat(context.getBeanNamesForType(TypedFactoryBean.class)).hasSize(1);
}
@Test
void individualBeanWithFactoryBeanObjectTypeAsTargetTypeAndLazy() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
RootBeanDefinition bd = new RootBeanDefinition();
bd.setBeanClass(TypedFactoryBean.class);
bd.setTargetType(String.class);
bd.setLazyInit(true);
context.registerBeanDefinition("fb", bd);
context.refresh();
assertThat(context.getType("&fb")).isNull();
assertThat(context.getType("fb")).isEqualTo(String.class);
assertThat(context.getBean("&fb")).isInstanceOf(FactoryBean.class);
assertThat(context.getType("&fb")).isEqualTo(TypedFactoryBean.class);
assertThat(context.getType("fb")).isEqualTo(String.class);
assertThat(context.getBeanNamesForType(FactoryBean.class)).hasSize(1);
assertThat(context.getBeanNamesForType(TypedFactoryBean.class)).hasSize(1);
}
@Configuration
static class Config {
@Bean
TestBean testBean() {
TestBean testBean = new TestBean();
testBean.name = "foo";
return testBean;
}
}
@Configuration("customConfigBeanName")
static class ConfigWithCustomName {
@Bean
TestBean testBean() {
return new TestBean();
}
}
@Configuration
static class TwoTestBeanConfig {
@Bean TestBean tb1() {
return new TestBean();
}
@Bean TestBean tb2() {
return new TestBean();
}
}
@Configuration
static class NameConfig {
@Bean String name() { return "foo"; }
}
@Configuration
@Import(NameConfig.class)
static class AutowiredConfig {
@Autowired String autowiredName;
@Bean TestBean testBean() {
TestBean testBean = new TestBean();
testBean.name = autowiredName;
return testBean;
}
}
static class BeanA {
BeanB b;
BeanC c;
@Autowired
BeanA(BeanB b, BeanC c) {
this.b = b;
this.c = c;
}
}
static class BeanB {
@Autowired ApplicationContext applicationContext;
public BeanB() {
}
}
static class BeanC {}
static class NonInstantiatedFactoryBean implements FactoryBean<String> {
NonInstantiatedFactoryBean() {
throw new IllegalStateException();
}
@Override
public String getObject() {
return "";
}
@Override
public Class<?> getObjectType() {
return String.class;
}
@Override
public boolean isSingleton() {
return true;
}
}
static class TypedFactoryBean implements FactoryBean<String> {
@Override
public String getObject() {
return "";
}
@Override
public Class<?> getObjectType() {
return String.class;
}
@Override
public boolean isSingleton() {
return true;
}
}
static class UntypedFactoryBean implements FactoryBean<Object> {
@Override
public Object getObject() {
return null;
}
@Override
public Class<?> getObjectType() {
return null;
}
@Override
public boolean isSingleton() {
return false;
}
}
}
class TestBean {
String name;
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + (name == null ? 0 : name.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
TestBean other = (TestBean) obj;
if (name == null) {
if (other.name != null) {
return false;
}
}
else if (!name.equals(other.name)) {
return false;
}
return true;
}
}
| spring-context/src/test/java/org/springframework/context/annotation/AnnotationConfigApplicationContextTests.java | /*
* Copyright 2002-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.context.annotation;
import java.util.Map;
import java.util.regex.Pattern;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.NoSuchBeanDefinitionException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.BeanPostProcessor;
import org.springframework.beans.factory.support.RootBeanDefinition;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation6.ComponentForScanning;
import org.springframework.context.annotation6.ConfigForScanning;
import org.springframework.context.annotation6.Jsr330NamedForScanning;
import org.springframework.core.ResolvableType;
import org.springframework.util.ObjectUtils;
import static java.lang.String.format;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.springframework.util.StringUtils.uncapitalize;
/**
* @author Chris Beams
* @author Juergen Hoeller
*/
@SuppressWarnings("resource")
class AnnotationConfigApplicationContextTests {
@Test
void scanAndRefresh() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.scan("org.springframework.context.annotation6");
context.refresh();
context.getBean(uncapitalize(ConfigForScanning.class.getSimpleName()));
context.getBean("testBean"); // contributed by ConfigForScanning
context.getBean(uncapitalize(ComponentForScanning.class.getSimpleName()));
context.getBean(uncapitalize(Jsr330NamedForScanning.class.getSimpleName()));
Map<String, Object> beans = context.getBeansWithAnnotation(Configuration.class);
assertThat(beans).hasSize(1);
}
@Test
void registerAndRefresh() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.register(Config.class, NameConfig.class);
context.refresh();
context.getBean("testBean");
context.getBean("name");
Map<String, Object> beans = context.getBeansWithAnnotation(Configuration.class);
assertThat(beans).hasSize(2);
}
@Test
void getBeansWithAnnotation() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.register(Config.class, NameConfig.class, UntypedFactoryBean.class);
context.refresh();
context.getBean("testBean");
context.getBean("name");
Map<String, Object> beans = context.getBeansWithAnnotation(Configuration.class);
assertThat(beans).hasSize(2);
}
@Test
void getBeanByType() {
ApplicationContext context = new AnnotationConfigApplicationContext(Config.class);
TestBean testBean = context.getBean(TestBean.class);
assertThat(testBean).isNotNull();
assertThat(testBean.name).isEqualTo("foo");
}
@Test
void getBeanByTypeRaisesNoSuchBeanDefinitionException() {
ApplicationContext context = new AnnotationConfigApplicationContext(Config.class);
// attempt to retrieve a bean that does not exist
Class<?> targetType = Pattern.class;
assertThatExceptionOfType(NoSuchBeanDefinitionException.class).isThrownBy(() ->
context.getBean(targetType))
.withMessageContaining(format("No qualifying bean of type '%s'", targetType.getName()));
}
@Test
void getBeanByTypeAmbiguityRaisesException() {
ApplicationContext context = new AnnotationConfigApplicationContext(TwoTestBeanConfig.class);
assertThatExceptionOfType(NoSuchBeanDefinitionException.class).isThrownBy(() ->
context.getBean(TestBean.class))
.withMessageContaining("No qualifying bean of type '" + TestBean.class.getName() + "'")
.withMessageContaining("tb1")
.withMessageContaining("tb2");
}
/**
* Tests that Configuration classes are registered according to convention
* @see org.springframework.beans.factory.support.DefaultBeanNameGenerator#generateBeanName
*/
@Test
void defaultConfigClassBeanNameIsGeneratedProperly() {
ApplicationContext context = new AnnotationConfigApplicationContext(Config.class);
// attempt to retrieve the instance by its generated bean name
Config configObject = (Config) context.getBean("annotationConfigApplicationContextTests.Config");
assertThat(configObject).isNotNull();
}
/**
* Tests that specifying @Configuration(value="foo") results in registering
* the configuration class with bean name 'foo'.
*/
@Test
void explicitConfigClassBeanNameIsRespected() {
ApplicationContext context = new AnnotationConfigApplicationContext(ConfigWithCustomName.class);
// attempt to retrieve the instance by its specified name
ConfigWithCustomName configObject = (ConfigWithCustomName) context.getBean("customConfigBeanName");
assertThat(configObject).isNotNull();
}
@Test
void autowiringIsEnabledByDefault() {
ApplicationContext context = new AnnotationConfigApplicationContext(AutowiredConfig.class);
assertThat(context.getBean(TestBean.class).name).isEqualTo("foo");
}
@Test
void nullReturningBeanPostProcessor() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.register(AutowiredConfig.class);
context.getBeanFactory().addBeanPostProcessor(new BeanPostProcessor() {
@Override
public Object postProcessBeforeInitialization(Object bean, String beanName) {
return (bean instanceof TestBean ? null : bean);
}
});
context.getBeanFactory().addBeanPostProcessor(new BeanPostProcessor() {
@Override
public Object postProcessBeforeInitialization(Object bean, String beanName) {
bean.getClass().getName();
return bean;
}
@Override
public Object postProcessAfterInitialization(Object bean, String beanName) {
bean.getClass().getName();
return bean;
}
});
context.refresh();
}
@Test
void individualBeans() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.register(BeanA.class, BeanB.class, BeanC.class);
context.refresh();
assertThat(context.getBean(BeanA.class).b).isSameAs(context.getBean(BeanB.class));
assertThat(context.getBean(BeanA.class).c).isSameAs(context.getBean(BeanC.class));
assertThat(context.getBean(BeanB.class).applicationContext).isSameAs(context);
}
@Test
void individualNamedBeans() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.registerBean("a", BeanA.class);
context.registerBean("b", BeanB.class);
context.registerBean("c", BeanC.class);
context.refresh();
assertThat(context.getBean("a", BeanA.class).b).isSameAs(context.getBean("b"));
assertThat(context.getBean("a", BeanA.class).c).isSameAs(context.getBean("c"));
assertThat(context.getBean("b", BeanB.class).applicationContext).isSameAs(context);
}
@Test
void individualBeanWithSupplier() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.registerBean(BeanA.class,
() -> new BeanA(context.getBean(BeanB.class), context.getBean(BeanC.class)));
context.registerBean(BeanB.class, BeanB::new);
context.registerBean(BeanC.class, BeanC::new);
context.refresh();
assertThat(context.getBeanFactory().containsSingleton("annotationConfigApplicationContextTests.BeanA")).isTrue();
assertThat(context.getBean(BeanA.class).b).isSameAs(context.getBean(BeanB.class));
assertThat(context.getBean(BeanA.class).c).isSameAs(context.getBean(BeanC.class));
assertThat(context.getBean(BeanB.class).applicationContext).isSameAs(context);
assertThat(context.getDefaultListableBeanFactory().getDependentBeans("annotationConfigApplicationContextTests.BeanB"))
.containsExactly("annotationConfigApplicationContextTests.BeanA");
assertThat(context.getDefaultListableBeanFactory().getDependentBeans("annotationConfigApplicationContextTests.BeanC"))
.containsExactly("annotationConfigApplicationContextTests.BeanA");
}
@Test
void individualBeanWithSupplierAndCustomizer() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.registerBean(BeanA.class,
() -> new BeanA(context.getBean(BeanB.class), context.getBean(BeanC.class)),
bd -> bd.setLazyInit(true));
context.registerBean(BeanB.class, BeanB::new);
context.registerBean(BeanC.class, BeanC::new);
context.refresh();
assertThat(context.getBeanFactory().containsSingleton("annotationConfigApplicationContextTests.BeanA")).isFalse();
assertThat(context.getBean(BeanA.class).b).isSameAs(context.getBean(BeanB.class));
assertThat(context.getBean(BeanA.class).c).isSameAs(context.getBean(BeanC.class));
assertThat(context.getBean(BeanB.class).applicationContext).isSameAs(context);
}
@Test
void individualNamedBeanWithSupplier() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.registerBean("a", BeanA.class,
() -> new BeanA(context.getBean(BeanB.class), context.getBean(BeanC.class)));
context.registerBean("b", BeanB.class, BeanB::new);
context.registerBean("c", BeanC.class, BeanC::new);
context.refresh();
assertThat(context.getBeanFactory().containsSingleton("a")).isTrue();
assertThat(context.getBean(BeanA.class).b).isSameAs(context.getBean("b", BeanB.class));
assertThat(context.getBean("a", BeanA.class).c).isSameAs(context.getBean("c"));
assertThat(context.getBean("b", BeanB.class).applicationContext).isSameAs(context);
}
@Test
void individualNamedBeanWithSupplierAndCustomizer() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.registerBean("a", BeanA.class,
() -> new BeanA(context.getBean(BeanB.class), context.getBean(BeanC.class)),
bd -> bd.setLazyInit(true));
context.registerBean("b", BeanB.class, BeanB::new);
context.registerBean("c", BeanC.class, BeanC::new);
context.refresh();
assertThat(context.getBeanFactory().containsSingleton("a")).isFalse();
assertThat(context.getBean(BeanA.class).b).isSameAs(context.getBean("b", BeanB.class));
assertThat(context.getBean("a", BeanA.class).c).isSameAs(context.getBean("c"));
assertThat(context.getBean("b", BeanB.class).applicationContext).isSameAs(context);
}
@Test
void individualBeanWithNullReturningSupplier() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.registerBean("a", BeanA.class, () -> null);
context.registerBean("b", BeanB.class, BeanB::new);
context.registerBean("c", BeanC.class, BeanC::new);
context.refresh();
assertThat(ObjectUtils.containsElement(context.getBeanNamesForType(BeanA.class), "a")).isTrue();
assertThat(ObjectUtils.containsElement(context.getBeanNamesForType(BeanB.class), "b")).isTrue();
assertThat(ObjectUtils.containsElement(context.getBeanNamesForType(BeanC.class), "c")).isTrue();
assertThat(context.getBeansOfType(BeanA.class)).isEmpty();
assertThat(context.getBeansOfType(BeanB.class).values().iterator().next()).isSameAs(context.getBean(BeanB.class));
assertThat(context.getBeansOfType(BeanC.class).values().iterator().next()).isSameAs(context.getBean(BeanC.class));
assertThatExceptionOfType(NoSuchBeanDefinitionException.class).isThrownBy(() ->
context.getBeanFactory().resolveNamedBean(BeanA.class));
assertThat(context.getBeanFactory().resolveNamedBean(BeanB.class).getBeanInstance()).isSameAs(context.getBean(BeanB.class));
assertThat(context.getBeanFactory().resolveNamedBean(BeanC.class).getBeanInstance()).isSameAs(context.getBean(BeanC.class));
}
@Test
void individualBeanWithSpecifiedConstructorArguments() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
BeanB b = new BeanB();
BeanC c = new BeanC();
context.registerBean(BeanA.class, b, c);
context.refresh();
assertThat(context.getBean(BeanA.class).b).isSameAs(b);
assertThat(context.getBean(BeanA.class).c).isSameAs(c);
assertThat(b.applicationContext).isNull();
}
@Test
void individualNamedBeanWithSpecifiedConstructorArguments() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
BeanB b = new BeanB();
BeanC c = new BeanC();
context.registerBean("a", BeanA.class, b, c);
context.refresh();
assertThat(context.getBean("a", BeanA.class).b).isSameAs(b);
assertThat(context.getBean("a", BeanA.class).c).isSameAs(c);
assertThat(b.applicationContext).isNull();
}
@Test
void individualBeanWithMixedConstructorArguments() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
BeanC c = new BeanC();
context.registerBean(BeanA.class, c);
context.registerBean(BeanB.class);
context.refresh();
assertThat(context.getBean(BeanA.class).b).isSameAs(context.getBean(BeanB.class));
assertThat(context.getBean(BeanA.class).c).isSameAs(c);
assertThat(context.getBean(BeanB.class).applicationContext).isSameAs(context);
}
@Test
void individualNamedBeanWithMixedConstructorArguments() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
BeanC c = new BeanC();
context.registerBean("a", BeanA.class, c);
context.registerBean("b", BeanB.class);
context.refresh();
assertThat(context.getBean("a", BeanA.class).b).isSameAs(context.getBean("b", BeanB.class));
assertThat(context.getBean("a", BeanA.class).c).isSameAs(c);
assertThat(context.getBean("b", BeanB.class).applicationContext).isSameAs(context);
}
@Test
void individualBeanWithFactoryBeanSupplier() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.registerBean("fb", NonInstantiatedFactoryBean.class, NonInstantiatedFactoryBean::new, bd -> bd.setLazyInit(true));
context.refresh();
assertThat(context.getType("fb")).isEqualTo(String.class);
assertThat(context.getType("&fb")).isEqualTo(NonInstantiatedFactoryBean.class);
assertThat(context.getBeanNamesForType(FactoryBean.class)).hasSize(1);
assertThat(context.getBeanNamesForType(NonInstantiatedFactoryBean.class)).hasSize(1);
}
@Test
void individualBeanWithFactoryBeanSupplierAndTargetType() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
RootBeanDefinition bd = new RootBeanDefinition();
bd.setInstanceSupplier(NonInstantiatedFactoryBean::new);
bd.setTargetType(ResolvableType.forClassWithGenerics(FactoryBean.class, String.class));
bd.setLazyInit(true);
context.registerBeanDefinition("fb", bd);
context.refresh();
assertThat(context.getType("fb")).isEqualTo(String.class);
assertThat(context.getType("&fb")).isEqualTo(FactoryBean.class);
assertThat(context.getBeanNamesForType(FactoryBean.class)).hasSize(1);
assertThat(context.getBeanNamesForType(NonInstantiatedFactoryBean.class)).isEmpty();
}
@Test
void individualBeanWithFactoryBeanObjectTypeAsTargetType() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
RootBeanDefinition bd = new RootBeanDefinition();
bd.setBeanClass(TypedFactoryBean.class);
bd.setTargetType(String.class);
context.registerBeanDefinition("fb", bd);
context.refresh();
assertThat(context.getType("&fb")).isEqualTo(TypedFactoryBean.class);
assertThat(context.getType("fb")).isEqualTo(String.class);
assertThat(context.getBeanNamesForType(FactoryBean.class)).hasSize(1);
assertThat(context.getBeanNamesForType(TypedFactoryBean.class)).hasSize(1);
}
@Test
void individualBeanWithFactoryBeanObjectTypeAsTargetTypeAndLazy() {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
RootBeanDefinition bd = new RootBeanDefinition();
bd.setBeanClass(TypedFactoryBean.class);
bd.setTargetType(String.class);
bd.setLazyInit(true);
context.registerBeanDefinition("fb", bd);
context.refresh();
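// Until the lazy FactoryBean is actually instantiated, only the declared target type is known,
// so the factory type ("&fb") cannot be resolved yet; it becomes visible after getBean("&fb") below.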
assertThat(context.getType("&fb")).isNull();
assertThat(context.getType("fb")).isEqualTo(String.class);
assertThat(context.getBean("&fb")).isInstanceOf(FactoryBean.class);
assertThat(context.getType("&fb")).isEqualTo(TypedFactoryBean.class);
assertThat(context.getType("fb")).isEqualTo(String.class);
assertThat(context.getBeanNamesForType(FactoryBean.class)).hasSize(1);
assertThat(context.getBeanNamesForType(TypedFactoryBean.class)).hasSize(1);
}
@Configuration
static class Config {
@Bean
TestBean testBean() {
TestBean testBean = new TestBean();
testBean.name = "foo";
return testBean;
}
}
@Configuration("customConfigBeanName")
static class ConfigWithCustomName {
@Bean
TestBean testBean() {
return new TestBean();
}
}
@Configuration
static class TwoTestBeanConfig {
@Bean TestBean tb1() {
return new TestBean();
}
@Bean TestBean tb2() {
return new TestBean();
}
}
@Configuration
static class NameConfig {
@Bean String name() { return "foo"; }
}
@Configuration
@Import(NameConfig.class)
static class AutowiredConfig {
@Autowired String autowiredName;
@Bean TestBean testBean() {
TestBean testBean = new TestBean();
testBean.name = autowiredName;
return testBean;
}
}
static class BeanA {
BeanB b;
BeanC c;
@Autowired
BeanA(BeanB b, BeanC c) {
this.b = b;
this.c = c;
}
}
static class BeanB {
@Autowired ApplicationContext applicationContext;
public BeanB() {
}
}
static class BeanC {}
static class NonInstantiatedFactoryBean implements FactoryBean<String> {
NonInstantiatedFactoryBean() {
throw new IllegalStateException();
}
@Override
public String getObject() {
return "";
}
@Override
public Class<?> getObjectType() {
return String.class;
}
@Override
public boolean isSingleton() {
return true;
}
}
static class TypedFactoryBean implements FactoryBean<String> {
@Override
public String getObject() {
return "";
}
@Override
public Class<?> getObjectType() {
return String.class;
}
@Override
public boolean isSingleton() {
return true;
}
}
static class UntypedFactoryBean implements FactoryBean<Object> {
@Override
public Object getObject() {
return null;
}
@Override
public Class<?> getObjectType() {
return null;
}
@Override
public boolean isSingleton() {
return false;
}
}
}
class TestBean {
String name;
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + (name == null ? 0 : name.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
TestBean other = (TestBean) obj;
if (name == null) {
if (other.name != null) {
return false;
}
}
else if (!name.equals(other.name)) {
return false;
}
return true;
}
}
| Improve nullReturningBeanPostProcessor() test
See gh-28459
| spring-context/src/test/java/org/springframework/context/annotation/AnnotationConfigApplicationContextTests.java | Improve nullReturningBeanPostProcessor() test | <ide><path>pring-context/src/test/java/org/springframework/context/annotation/AnnotationConfigApplicationContextTests.java
<ide> /*
<del> * Copyright 2002-2020 the original author or authors.
<add> * Copyright 2002-2022 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> void nullReturningBeanPostProcessor() {
<ide> AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
<ide> context.register(AutowiredConfig.class);
<add> // 1st BPP always gets invoked
<add> context.getBeanFactory().addBeanPostProcessor(new BeanPostProcessor() {
<add> @Override
<add> public Object postProcessBeforeInitialization(Object bean, String beanName) {
<add> if (bean instanceof TestBean) {
<add> TestBean testBean = (TestBean) bean;
<add> testBean.name = testBean.name + "-before";
<add> }
<add> return bean;
<add> }
<add> @Override
<add> public Object postProcessAfterInitialization(Object bean, String beanName) {
<add> if (bean instanceof TestBean) {
<add> TestBean testBean = (TestBean) bean;
<add> testBean.name = testBean.name + "-after";
<add> }
<add> return bean;
<add> }
<add> });
<add> // 2nd BPP always returns null for a TestBean
<ide> context.getBeanFactory().addBeanPostProcessor(new BeanPostProcessor() {
<ide> @Override
<ide> public Object postProcessBeforeInitialization(Object bean, String beanName) {
<ide> return (bean instanceof TestBean ? null : bean);
<ide> }
<add> @Override
<add> public Object postProcessAfterInitialization(Object bean, String beanName) {
<add> return (bean instanceof TestBean ? null : bean);
<add> }
<ide> });
<add> // 3rd BPP never gets invoked with a TestBean
<ide> context.getBeanFactory().addBeanPostProcessor(new BeanPostProcessor() {
<ide> @Override
<ide> public Object postProcessBeforeInitialization(Object bean, String beanName) {
<del> bean.getClass().getName();
<add> assertThat(bean).isNotInstanceOf(TestBean.class);
<ide> return bean;
<ide> }
<ide> @Override
<ide> public Object postProcessAfterInitialization(Object bean, String beanName) {
<del> bean.getClass().getName();
<add> assertThat(bean).isNotInstanceOf(TestBean.class);
<ide> return bean;
<ide> }
<ide> });
<ide> context.refresh();
<add> TestBean testBean = context.getBean(TestBean.class);
<add> assertThat(testBean).isNotNull();
<add> assertThat(testBean.name).isEqualTo("foo-before-after");
<ide> }
<ide>
<ide> @Test |
|
Java | agpl-3.0 | error: pathspec 'src/test/java/com/imcode/imcms/controller/api/TemplateGroupControllerTest.java' did not match any file(s) known to git
| 52744229401034d0cb0481b4917f610c8d3e62a1 | 1 | imCodePartnerAB/imcms,imCodePartnerAB/imcms,imCodePartnerAB/imcms | package com.imcode.imcms.controller.api;
import com.imcode.imcms.components.datainitializer.TemplateDataInitializer;
import com.imcode.imcms.controller.AbstractControllerTest;
import com.imcode.imcms.domain.service.TemplateGroupService;
import com.imcode.imcms.model.TemplateGroup;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.web.servlet.request.MockHttpServletRequestBuilder;
import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
@Transactional
public class TemplateGroupControllerTest extends AbstractControllerTest {
@Autowired
private TemplateDataInitializer dataInitializer;
@Autowired
private TemplateGroupService templateGroupService;
@BeforeEach
public void setUp() {
dataInitializer.cleanRepositories();
}
@Override
protected String controllerPath() {
return "/template-group";
}
@Test
public void getAll_When_TemplateGroupExists_Expected_OkAndCorrectSize() throws Exception {
final List<TemplateGroup> test = dataInitializer.createTemplateGroups(4);
final MockHttpServletRequestBuilder requestBuilder = MockMvcRequestBuilders.get(controllerPath());
performRequestBuilderExpectedOkAndJsonContentEquals(requestBuilder, asJson(test.toArray()));
}
@Test
public void getAll_When_TemplateGroupNotExists_Expected_EmptyResult() throws Exception {
final MockHttpServletRequestBuilder requestBuilder = MockMvcRequestBuilders.get(controllerPath());
performRequestBuilderExpectedOkAndJsonContentEquals(requestBuilder, asJson("[]"));
}
@Test
public void getByName_When_TemplateGroupNotExists_Expected_CorrectException() {
}
@Test
public void getByName_When_TemplateGroupExists_Expected_CorrectEntity() {
}
@Test
public void create_Expected_CreateEntity() {
}
@Test
public void create_When_TemplateGroupHasEmptyName_Expected_CorrectException() {
}
@Test
public void create_When_TemplateGroupExistWithName_Expected_CorrectException() {
}
@Test
public void edit_When_TemplateGroupHasEmptyName_Expected_CorrectException() {
}
@Test
public void edit_When_TemplateGroupExistWithName_Expected_CorrectException() {
}
@Test
public void edit_Expected_EditEntity() {
}
@Test
public void delete_When_TemplateGroupExist_Expected_Deleted() {
}
@Test
public void delete_When_TemplateGroupNotExist_Expected_CorrectException() {
}
}
| src/test/java/com/imcode/imcms/controller/api/TemplateGroupControllerTest.java | Issue IMCMS-332: New design to super admin page: files tab
- Create test and scenario;
| src/test/java/com/imcode/imcms/controller/api/TemplateGroupControllerTest.java | Issue IMCMS-332: New design to super admin page: files tab - Create test and scenario; | <ide><path>rc/test/java/com/imcode/imcms/controller/api/TemplateGroupControllerTest.java
<add>package com.imcode.imcms.controller.api;
<add>
<add>import com.imcode.imcms.components.datainitializer.TemplateDataInitializer;
<add>import com.imcode.imcms.controller.AbstractControllerTest;
<add>import com.imcode.imcms.domain.service.TemplateGroupService;
<add>import com.imcode.imcms.model.TemplateGroup;
<add>import org.junit.jupiter.api.BeforeEach;
<add>import org.junit.jupiter.api.Test;
<add>import org.springframework.beans.factory.annotation.Autowired;
<add>import org.springframework.test.web.servlet.request.MockHttpServletRequestBuilder;
<add>import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;
<add>import org.springframework.transaction.annotation.Transactional;
<add>
<add>import java.util.List;
<add>
<add>@Transactional
<add>public class TemplateGroupControllerTest extends AbstractControllerTest {
<add>
<add> @Autowired
<add> private TemplateDataInitializer dataInitializer;
<add>
<add> @Autowired
<add> private TemplateGroupService templateGroupService;
<add>
<add> @BeforeEach
<add> public void setUp() {
<add> dataInitializer.cleanRepositories();
<add> }
<add>
<add> @Override
<add> protected String controllerPath() {
<add> return "/template-group";
<add> }
<add>
<add> @Test
<add> public void getAll_When_TemplateGroupExists_Expected_OkAndCorrectSize() throws Exception {
<add> final List<TemplateGroup> test = dataInitializer.createTemplateGroups(4);
<add> final MockHttpServletRequestBuilder requestBuilder = MockMvcRequestBuilders.get(controllerPath());
<add> performRequestBuilderExpectedOkAndJsonContentEquals(requestBuilder, asJson(test.toArray()));
<add> }
<add>
<add> @Test
<add> public void getAll_When_TemplateGroupNotExists_Expected_EmptyResult() throws Exception {
<add> final MockHttpServletRequestBuilder requestBuilder = MockMvcRequestBuilders.get(controllerPath());
<add> performRequestBuilderExpectedOkAndJsonContentEquals(requestBuilder, asJson("[]"));
<add> }
<add>
<add> @Test
<add> public void getByName_When_TemplateGroupNotExists_Expected_CorrectException() {
<add>
<add> }
<add>
<add> @Test
<add> public void getByName_When_TemplateGroupExists_Expected_CorrectEntity() {
<add>
<add> }
<add>
<add> @Test
<add> public void create_Expected_CreateEntity() {
<add>
<add> }
<add>
<add> @Test
<add> public void create_When_TemplateGroupHasEmptyName_Expected_CorrectException() {
<add>
<add> }
<add>
<add> @Test
<add> public void create_When_TemplateGroupExistWithName_Expected_CorrectException() {
<add>
<add> }
<add>
<add> @Test
<add> public void edit_When_TemplateGroupHasEmptyName_Expected_CorrectException() {
<add>
<add> }
<add>
<add> @Test
<add> public void edit_When_TemplateGroupExistWithName_Expected_CorrectException() {
<add>
<add> }
<add>
<add> @Test
<add> public void edit_Expected_EditEntity() {
<add>
<add> }
<add>
<add> @Test
<add> public void delete_When_TemplateGroupExist_Expected_Deleted() {
<add>
<add> }
<add>
<add> @Test
<add> public void delete_When_TemplateGroupNotExist_Expected_CorrectException() {
<add>
<add> }
<add>} |
|
Java | apache-2.0 | fc347b90860dc834b5752e9a2deb03bd27c544f7 | 0 | lorban/terracotta-auditor | /*
* Copyright Terracotta, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terracotta.auditor.verifier;
import java.util.Objects;
public class RecordValue {
public static final RecordValue ABSENT = new RecordValue(null, true, false);
public static final RecordValue UNKNOWN_PRESENT = new RecordValue(null, false, true);
private final String result;
private final boolean absent;
private final boolean unknown;
public RecordValue(String result) {
this(result, false, false);
}
private RecordValue(String result, boolean absent, boolean unknown) {
this.result = result;
this.absent = absent;
this.unknown = unknown;
}
public boolean isAbsent() {
return absent;
}
public boolean isUnknown() {
return unknown;
}
public String getResult() {
return result;
}
@Override
public String toString() {
if (absent) {
return "absent";
}
if (unknown) {
return "unknown";
}
return result;
}
@Override
public boolean equals(Object o) {
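// The ABSENT and UNKNOWN_PRESENT sentinels are only ever equal to themselves;
// all other values are compared by their result string.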
if (this == o) {
return true;
}
if (!(o instanceof RecordValue)) {
return false;
}
RecordValue other = (RecordValue) o;
if (this == ABSENT) {
return other == ABSENT;
}
if (this == UNKNOWN_PRESENT) {
return other == UNKNOWN_PRESENT;
}
return Objects.equals(result, other.result);
}
@Override
public int hashCode() {
return Objects.hash(absent, unknown, result);
}
}
| src/main/java/org/terracotta/auditor/verifier/RecordValue.java | /*
* Copyright Terracotta, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terracotta.auditor.verifier;
public class RecordValue {
public static final RecordValue ABSENT = new RecordValue(null, true, false);
public static final RecordValue UNKNOWN_PRESENT = new RecordValue(null, false, true);
private final String result;
private final boolean absent;
private final boolean unknown;
public RecordValue(String result) {
this(result, false, false);
}
private RecordValue(String result, boolean absent, boolean unknown) {
this.result = result;
this.absent = absent;
this.unknown = unknown;
}
public boolean isAbsent() {
return absent;
}
public boolean isUnknown() {
return unknown;
}
public String getResult() {
return result;
}
@Override
public String toString() {
if (absent) {
return "absent";
}
if (unknown) {
return "unknown";
}
return result;
}
}
| Equals and hashCode for RecordValue
| src/main/java/org/terracotta/auditor/verifier/RecordValue.java | Equals and hashCode for RecordValue | <ide><path>rc/main/java/org/terracotta/auditor/verifier/RecordValue.java
<ide> * limitations under the License.
<ide> */
<ide> package org.terracotta.auditor.verifier;
<add>
<add>import java.util.Objects;
<ide>
<ide> public class RecordValue {
<ide> public static final RecordValue ABSENT = new RecordValue(null, true, false);
<ide> }
<ide> return result;
<ide> }
<add>
<add> @Override
<add> public boolean equals(Object o) {
<add> if (this == o) {
<add> return true;
<add> }
<add>
<add> if (!(o instanceof RecordValue)) {
<add> return false;
<add> }
<add>
<add> RecordValue other = (RecordValue) o;
<add>
<add> if (this == ABSENT) {
<add> return other == ABSENT;
<add> }
<add>
<add> if (this == UNKNOWN_PRESENT) {
<add> return other == UNKNOWN_PRESENT;
<add> }
<add>
<add> return Objects.equals(result, other.result);
<add> }
<add>
<add> @Override
<add> public int hashCode() {
<add> return Objects.hash(absent, unknown, result);
<add> }
<ide> } |
|
JavaScript | mit | 1e1d83cb3288b74255cc13b928470cf0252bc6d4 | 0 | gkjohnson/three-mesh-bvh,gkjohnson/three-mesh-bvh,gkjohnson/three-mesh-bvh | import * as THREE from '../node_modules/three/build/three.module.js'
// Settings
const maxLeafNodes = 10;
const maxMatchingTriangles = 0.5;
// Utilities
const abcFields = ['a', 'b', 'c'];
const xyzFields = ['x', 'y', 'z'];
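// Picks the axis (0 = x, 1 = y, 2 = z) along which the given bounding box is widest;
// the tree build below uses this to choose the split axis for each node.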
const getLongestEdgeIndex = bb => {
let splitDimIdx = -1;
let splitDist = -Infinity;
xyzFields.forEach((d, i) => {
const dist = bb.max[d] - bb.min[d];
if (dist > splitDist) {
splitDist = dist;
splitDimIdx = i;
}
});
return splitDimIdx;
}
// reusable vectors
const avgtemp = new THREE.Vector3();
const vectemp = new THREE.Vector3();
const centemp = new THREE.Vector3();
// for BufferGeometry
const getBoundsBufferGeometry = (tris, bounds, avg, geo) => {
const pos = geo.attributes.position.array;
const getVertIndex = geo.index ? (i => geo.index.array[i]) : (i => i);
avg.set(0, 0, 0);
for (let i = 0; i < tris.length; i ++) {
const tri = tris[i];
for (let v = 0; v < 3; v ++) {
const vindex = getVertIndex(tri * 3 + v);
const x = pos[vindex * 3 + 0];
const y = pos[vindex * 3 + 1];
const z = pos[vindex * 3 + 2];
vectemp.x = x;
vectemp.y = y;
vectemp.z = z;
bounds.expandByPoint(vectemp);
avg.x += x;
avg.y += y;
avg.z += z;
}
}
avg.x /= tris.length * 3;
avg.y /= tris.length * 3;
avg.z /= tris.length * 3;
}
const getSphereBufferGeometry = (tris, sphere, geo) => {
const pos = geo.attributes.position.array;
const getVertIndex = geo.index ? (i => geo.index.array[i]) : (i => i);
const center = sphere.center;
let maxRadiusSq = 0;
for (let i = 0; i < tris.length; i ++) {
const tri = tris[i];
for (let v = 0; v < 3; v ++) {
const vindex = getVertIndex(tri * 3 + v);
// positions are laid out as three floats per vertex, so scale the vertex index by 3
const x = pos[vindex * 3 + 0];
const y = pos[vindex * 3 + 1];
const z = pos[vindex * 3 + 2];
vectemp.x = x;
vectemp.y = y;
vectemp.z = z;
maxRadiusSq = Math.max(maxRadiusSq, center.distanceToSquared(vectemp));
}
}
sphere.radius = Math.sqrt(maxRadiusSq);
}
// for Geometry
const getBoundsGeometry = (tris, bounds, avg, geo) => {
const faces = geo.faces;
const verts = geo.vertices;
avg.set(0, 0, 0);
for (let i = 0; i < tris.length; i ++) {
const face = faces[tris[i]];
abcFields.forEach(id => {
const vert = verts[face[id]];
bounds.expandByPoint(vert);
avg.x += vert.x;
avg.y += vert.y;
avg.z += vert.z;
});
}
avg.x /= tris.length * 3;
avg.y /= tris.length * 3;
avg.z /= tris.length * 3;
}
const getSphereGeometry = (tris, sphere, geo) => {
const faces = geo.faces;
const verts = geo.vertices;
const center = sphere.center;
let maxRadiusSq = 0;
for (let i = 0; i < tris.length; i ++) {
const face = faces[tris[i]];
abcFields.forEach(id => {
const vert = verts[face[id]];
maxRadiusSq = Math.max(maxRadiusSq, center.distanceToSquared(vert));
});
}
sphere.radius = Math.sqrt(maxRadiusSq);
}
// Classes
class TriangleBoundsNode {
constructor() {
this.boundingBox = new THREE.Box3();
this.boundingSphere = new THREE.Sphere();
this.children = [];
this.tris = [];
}
}
class TriangleBoundsTree {
constructor(geo) {
if (geo.isBufferGeometry) {
this._root = this._initBufferGeometry(geo);
} else if(geo.isGeometry) {
this._root = this._initGeometry(geo);
} else {
throw new Error('Object is not Geometry or BufferGeometry');
}
}
/* Public API */
collectCandidates(origray) {
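// Walks the bounds tree, skipping any subtree whose bounding sphere or box the ray misses,
// and collects the triangle indices stored in every leaf that is reached. Callers still need
// an exact ray/triangle intersection test on the returned candidates.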
const candidates = [];
const recurse = (node, ray) => {
if (!ray.intersectsSphere(node.boundingSphere) || !ray.intersectsBox(node.boundingBox)) return;
if (node.children.length) node.children.forEach(c => recurse(c, ray))
else candidates.push(...node.tris)
}
recurse(this._root, origray);
return candidates;
}
/* Private Functions */
_initBufferGeometry(geo) {
// function for retrieving the next vertex index because
// we may not have array indices
const getVertIndex = geo.index ? (i => geo.index.array[i]) : (i => i);
const pos = geo.attributes.position.array;
// a list of every available triangle index
const origTris = new Array(geo.index ? (geo.index.count / 3) : (pos.length / 9));
for (let i = 0; i < origTris.length; i ++) origTris[i] = i;
// use a queue to run the node creation functions
// because otherwise we run the risk of a stackoverflow
// In the case of buffer geometry it also seems to be
// faster than recursing
const queue = [];
const createNode = tris => {
const node = new TriangleBoundsNode();
// get the bounds of the triangles
getBoundsBufferGeometry(tris, node.boundingBox, avgtemp, geo);
node.boundingBox.getCenter(node.boundingSphere.center);
getSphereBufferGeometry(tris, node.boundingSphere, geo);
if (tris.length <= maxLeafNodes) {
node.tris = tris;
return node;
}
// decide which axis to split on (longest edge)
const splitDimIdx = getLongestEdgeIndex(node.boundingBox);
const splitDimStr = xyzFields[splitDimIdx];
const left = [], right = [], shared = [];
for (let i = 0; i < tris.length; i ++) {
const tri = tris[i];
let inLeft = false;
let inRight = false;
for (let v = 0; v < 3; v ++) {
const vindex = getVertIndex(tri * 3 + v);
// get the vertex value along the
// given axis
const val = pos[vindex * 3 + splitDimIdx];
inLeft = inLeft || val >= avgtemp[splitDimStr];
inRight = inRight || val <= avgtemp[splitDimStr];
}
if (inLeft && inRight) shared.push(tri);
if (inLeft) left.push(tri);
if (inRight && !inLeft) right.push(tri);
}
if (shared.length / tris.length >= maxMatchingTriangles) {
node.tris = tris;
} else {
if (left.length) queue.push(() => node.children.push(createNode(left)));
if (right.length) queue.push(() => node.children.push(createNode(right)));
}
return node;
}
const n = createNode(origTris);
while (queue.length) queue.pop()();
return n;
}
_initGeometry(geo) {
const faces = geo.faces;
const verts = geo.vertices;
// a list of every available triangle index
const origTris = new Array(faces.length);
for (let i = 0; i < origTris.length; i ++) origTris[i] = i;
// use a queue to run the node creation functions
// because otherwise we run the risk of a stackoverflow
const queue = [];
const createNode = tris => {
const node = new TriangleBoundsNode();
// Calculate the bounds
getBoundsGeometry(tris, node.boundingBox, avgtemp, geo);
node.boundingBox.getCenter(node.boundingSphere.center);
getSphereGeometry(tris, node.boundingSphere, geo);
if (tris.length <= maxLeafNodes) {
node.tris = tris;
return node;
}
// decide which axis to split on (longest edge)
const splitDimIdx = getLongestEdgeIndex(node.boundingBox);
const splitDimStr = xyzFields[splitDimIdx];
const left = [], right = [], shared = [];
for (let i = 0; i < tris.length; i ++) {
const tri = tris[i];
const face = faces[tri];
let inLeft = false;
let inRight = false;
abcFields.forEach(id => {
const vert = verts[face[id]];
const val = vert[splitDimStr];
inLeft = inLeft || val >= avgtemp[splitDimStr];
inRight = inRight || val <= avgtemp[splitDimStr];
});
if (inLeft && inRight) shared.push(tri);
if (inLeft) left.push(tri);
if (inRight && !inLeft) right.push(tri);
}
if (shared.length / tris.length >= maxMatchingTriangles) {
node.tris = tris;
} else {
if (left.length) queue.push(() => node.children.push(createNode(left)));
if (right.length) queue.push(() => node.children.push(createNode(right)));
}
return node;
}
const n = createNode(origTris);
while (queue.length) queue.pop()();
return n;
}
}
export default TriangleBoundsTree;
| lib/TriangleBoundsTree.js | import * as THREE from '../node_modules/three/build/three.module.js'
// Settings
const maxLeafNodes = 10;
const maxMatchingTriangles = 0.5;
// Utilities
const abcFields = ['a', 'b', 'c'];
const xyzFields = ['x', 'y', 'z'];
const getLongestEdgeIndex = bb => {
let splitDimIdx = -1;
let splitDist = -Infinity;
xyzFields.forEach((d, i) => {
const dist = bb.max[d] - bb.min[d];
if (dist > splitDist) {
splitDist = dist;
splitDimIdx = i;
}
});
return splitDimIdx;
}
// reusable vectors
const avgtemp = new THREE.Vector3();
const vectemp = new THREE.Vector3();
const centemp = new THREE.Vector3();
// for BufferGeometry
const getBoundsBufferGeometry = (tris, bounds, avg, geo) => {
const pos = geo.attributes.position.array;
const getVertIndex = geo.index ? (i => geo.index.array[i]) : (i => i);
avg.set(0, 0, 0);
for (let i = 0; i < tris.length; i ++) {
const tri = tris[i];
for (let v = 0; v < 3; v ++) {
const vindex = getVertIndex(tri * 3 + v);
const x = pos[vindex * 3 + 0];
const y = pos[vindex * 3 + 1];
const z = pos[vindex * 3 + 2];
vectemp.x = x;
vectemp.y = y;
vectemp.z = z;
bounds.expandByPoint(vectemp);
avg.x += x;
avg.y += y;
avg.z += z;
}
}
avg.x /= tris.length * 3;
avg.y /= tris.length * 3;
avg.z /= tris.length * 3;
}
const getSphereBufferGeometry = (tris, sphere, geo) => {
const pos = geo.attributes.position.array;
const getVertIndex = geo.index ? (i => geo.index.array[i]) : (i => i);
const center = sphere.center;
let maxRadiusSq = 0;
for (let i = 0; i < tris.length; i ++) {
const tri = tris[i];
for (let v = 0; v < 3; v ++) {
const vindex = getVertIndex(tri * 3 + v);
// positions are laid out as three floats per vertex, so scale the vertex index by 3
const x = pos[vindex * 3 + 0];
const y = pos[vindex * 3 + 1];
const z = pos[vindex * 3 + 2];
vectemp.x = x;
vectemp.y = y;
vectemp.z = z;
maxRadiusSq = Math.max(maxRadiusSq, center.distanceToSquared(vectemp));
}
}
sphere.radius = Math.sqrt(maxRadiusSq);
}
// for Geometry
const getBoundsGeometry = (tris, bounds, avg, geo) => {
const faces = geo.faces;
const verts = geo.vertices;
avg.set(0, 0, 0);
for (let i = 0; i < tris.length; i ++) {
const face = faces[tris[i]];
abcFields.forEach(id => {
const vert = verts[face[id]];
bounds.expandByPoint(vert);
avg.x += vert.x;
avg.y += vert.y;
avg.z += vert.z;
});
}
avg.x /= tris.length * 3;
avg.y /= tris.length * 3;
avg.z /= tris.length * 3;
}
const getSphereGeometry = (tris, sphere, geo) => {
const faces = geo.faces;
const verts = geo.vertices;
const center = sphere.center;
let maxRadiusSq = 0;
for (let i = 0; i < tris.length; i ++) {
const face = faces[tris[i]];
abcFields.forEach(id => {
const vert = verts[face[id]];
maxRadiusSq = Math.max(maxRadiusSq, center.distanceToSquared(vert));
});
}
sphere.radius = Math.sqrt(maxRadiusSq);
}
// Classes
class TriangleBoundsNode {
constructor() {
this.boundingBox = new THREE.Box3();
this.boundingSphere = new THREE.Sphere();
this.children = [];
this.tris = [];
}
}
class TriangleBoundsTree {
constructor(geo) {
if (geo.isBufferGeometry) {
this._root = this._initBufferGeometry(geo);
} else if(geo.isGeometry) {
this._root = this._initGeometry(geo);
} else {
throw new Error('Object is not Geometry or BufferGeometry');
}
}
/* Public API */
collectCandidates(origray) {
let candidates = [];
const recurse = (node, ray) => {
if (!ray.intersectsSphere(node.boundingSphere) || !ray.intersectsBox(node.boundingBox)) return;
if (node.children.length) node.children.forEach(c => recurse(c, ray))
else candidates.push(...node.tris)
}
recurse(this._root, origray);
return candidates;
}
/* Private Functions */
_initBufferGeometry(geo) {
// function for retrieving the next vertex index because
// we may not have array indices
const getVertIndex = geo.index ? (i => geo.index.array[i]) : (i => i);
const pos = geo.attributes.position.array;
// a list of every available triangle index
const origTris = new Array(geo.index ? (geo.index.count / 3) : (pos.length / 9));
for (let i = 0; i < origTris.length; i ++) origTris[i] = i;
// use a queue to run the node creation functions
// because otherwise we run the risk of a stackoverflow
// In the case of buffer geometry it also seems to be
// faster than recursing
const queue = [];
const createNode = tris => {
const node = new TriangleBoundsNode();
// get the bounds of the triangles
getBoundsBufferGeometry(tris, node.boundingBox, avgtemp, geo);
node.boundingBox.getCenter(node.boundingSphere.center);
getSphereBufferGeometry(tris, node.boundingSphere, geo);
if (tris.length <= maxLeafNodes) {
node.tris = tris;
return node;
}
// decide which axis to split on (longest edge)
const splitDimIdx = getLongestEdgeIndex(node.boundingBox);
const splitDimStr = xyzFields[splitDimIdx];
const left = [], right = [], shared = [];
for (let i = 0; i < tris.length; i ++) {
const tri = tris[i];
let inLeft = false;
let inRight = false;
for (let v = 0; v < 3; v ++) {
const vindex = getVertIndex(tri * 3 + v);
// get the vertex value along the
// given axis
const val = pos[vindex * 3 + splitDimIdx];
inLeft = inLeft || val >= avgtemp[splitDimStr];
inRight = inRight || val <= avgtemp[splitDimStr];
}
if (inLeft && inRight) shared.push(tri);
if (inLeft) left.push(tri);
if (inRight && !inLeft) right.push(tri);
}
if (shared.length / tris.length >= maxMatchingTriangles) {
node.tris = tris;
} else {
if (left.length) queue.push(() => node.children.push(createNode(left)));
if (right.length) queue.push(() => node.children.push(createNode(right)));
}
return node;
}
const n = createNode(origTris);
while (queue.length) queue.pop()();
return n;
}
_initGeometry(geo) {
const faces = geo.faces;
const verts = geo.vertices;
// a list of every available triangle index
const origTris = new Array(faces.length);
for (let i = 0; i < origTris.length; i ++) origTris[i] = i;
// use a queue to run the node creation functions
// because otherwise we run the risk of a stackoverflow
const queue = [];
const createNode = tris => {
const node = new TriangleBoundsNode();
// Calculate the bounds
getBoundsGeometry(tris, node.boundingBox, avgtemp, geo);
node.boundingBox.getCenter(node.boundingSphere.center);
getSphereGeometry(tris, node.boundingSphere, geo);
if (tris.length <= maxLeafNodes) {
node.tris = tris;
return node;
}
// decide which axis to split on (longest edge)
const splitDimIdx = getLongestEdgeIndex(node.boundingBox);
const splitDimStr = xyzFields[splitDimIdx];
const left = [], right = [], shared = [];
for (let i = 0; i < tris.length; i ++) {
const tri = tris[i];
const face = faces[tri];
let inLeft = false;
let inRight = false;
abcFields.forEach(id => {
const vert = verts[face[id]];
const val = vert[splitDimStr];
inLeft = inLeft || val >= avgtemp[splitDimStr];
inRight = inRight || val <= avgtemp[splitDimStr];
});
if (inLeft && inRight) shared.push(tri);
if (inLeft) left.push(tri);
if (inRight && !inLeft) right.push(tri);
}
if (shared.length / tris.length >= maxMatchingTriangles) {
node.tris = tris;
} else {
if (left.length) queue.push(() => node.children.push(createNode(left)));
if (right.length) queue.push(() => node.children.push(createNode(right)));
}
return node;
}
const n = createNode(origTris);
while (queue.length) queue.pop()();
return n;
}
}
export default TriangleBoundsTree;
| let to const
| lib/TriangleBoundsTree.js | let to const | <ide><path>ib/TriangleBoundsTree.js
<ide>
<ide> /* Public API */
<ide> collectCandidates(origray) {
<del> let candidates = [];
<add> const candidates = [];
<ide> const recurse = (node, ray) => {
<ide> if (!ray.intersectsSphere(node.boundingSphere) || !ray.intersectsBox(node.boundingBox)) return;
<ide> |
|
Java | apache-2.0 | 991775423ecf092fb0f608e6d41ff131f78ed955 | 0 | KernelHaven/FeatureEffectAnalysis | package net.ssehub.kernel_haven.feature_effects;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import net.ssehub.kernel_haven.SetUpException;
import net.ssehub.kernel_haven.analysis.AnalysisComponent;
import net.ssehub.kernel_haven.config.Configuration;
import net.ssehub.kernel_haven.feature_effects.FeatureEffectFinder.VariableWithFeatureEffect;
import net.ssehub.kernel_haven.feature_effects.PcFinder.VariableWithPcs;
import net.ssehub.kernel_haven.feature_effects.Settings.SimplificationType;
import net.ssehub.kernel_haven.util.io.TableElement;
import net.ssehub.kernel_haven.util.io.TableRow;
import net.ssehub.kernel_haven.util.logic.Conjunction;
import net.ssehub.kernel_haven.util.logic.Disjunction;
import net.ssehub.kernel_haven.util.logic.DisjunctionQueue;
import net.ssehub.kernel_haven.util.logic.False;
import net.ssehub.kernel_haven.util.logic.Formula;
import net.ssehub.kernel_haven.util.logic.Negation;
import net.ssehub.kernel_haven.util.logic.True;
import net.ssehub.kernel_haven.util.logic.Variable;
/**
* A component that finds feature effects for variables.
*
* @author Adam
*/
public class FeatureEffectFinder extends AnalysisComponent<VariableWithFeatureEffect> {
/**
* A variable together with its feature effect formula.
*
* @author Adam
*/
@TableRow
public static class VariableWithFeatureEffect {
private String variable;
private Formula featureEffect;
/**
* Creates a new feature effect result.
*
* @param variable The variable name.
* @param featureEffect The feature effect of the given variable. Must not be <code>null</code>.
*/
public VariableWithFeatureEffect(String variable, Formula featureEffect) {
this.variable = variable;
this.featureEffect = featureEffect;
}
/**
* Returns the variable name.
*
* @return The name of the variable.
*/
@TableElement(name = "Variable", index = 0)
public String getVariable() {
return variable;
}
/**
* Returns the feature effect formula for this variable.
*
* @return The feature effect, never <code>null</code>.
*/
@TableElement(name = "Feature Effect", index = 1)
public Formula getFeatureEffect() {
return featureEffect;
}
@Override
public String toString() {
return "FeatureEffect[" + variable + "] = " + featureEffect.toString();
}
}
private AnalysisComponent<VariableWithPcs> pcFinder;
private PresenceConditionAnalysisHelper helper;
private SimplificationType simplifyType;
private FormulaSimplifier simplifier = null;
/**
* Creates a new {@link FeatureEffectFinder} for the given PC finder.
*
* @param config The global configuration.
* @param pcFinder The component to get the PCs from.
*
* @throws SetUpException If creating this component fails.
*/
public FeatureEffectFinder(Configuration config, AnalysisComponent<VariableWithPcs> pcFinder)
throws SetUpException {
super(config);
this.pcFinder = pcFinder;
this.helper = new PresenceConditionAnalysisHelper(config);
simplifyType = helper.getSimplificationMode();
if (simplifyType.ordinal() >= SimplificationType.PRESENCE_CONDITIONS.ordinal()) {
// Will throw an exception if CNF Utils are not present (but was selected by user in configuration file)
simplifier = new FormulaSimplifier();
}
}
@Override
protected void execute() {
VariableWithPcs pcs;
while ((pcs = pcFinder.getNextResult()) != null) {
if (helper.isRelevant(pcs.getVariable())) {
addResult(new VariableWithFeatureEffect(
helper.doReplacements(pcs.getVariable()),
helper.doReplacements(buildFeatureEffefct(pcs))
));
}
}
}
/**
* Replaces each occurrence of a variable with a constant.
*
* TODO: move this to general utils.
*
* @param formula The formula to replace the variable in; this formula is not altered.
* @param variable The variable to replace.
* @param value Which constant the variable should be replaced with.
* @param exactMatch Whether the variable name has to match exactly. If <code>false</code>, then startsWith()
* is used to find matches to replace.
*
* @return A new Formula equal to the given formula, but with each occurrence of the variable replaced.
*/
private Formula setToValue(Formula formula, String variable, boolean value, boolean exactMatch) {
Formula result;
if (formula instanceof Variable) {
Variable var = (Variable) formula;
boolean replace;
if (exactMatch) {
replace = var.getName().equals(variable);
} else {
replace = var.getName().startsWith(variable);
}
if (replace) {
result = (value ? True.INSTANCE : False.INSTANCE);
} else {
result = var;
}
} else if (formula instanceof Negation) {
result = new Negation(setToValue(((Negation) formula).getFormula(), variable, value, exactMatch));
} else if (formula instanceof Disjunction) {
result = new Disjunction(
setToValue(((Disjunction) formula).getLeft(), variable, value, exactMatch),
setToValue(((Disjunction) formula).getRight(), variable, value, exactMatch));
} else if (formula instanceof Conjunction) {
result = new Conjunction(
setToValue(((Conjunction) formula).getLeft(), variable, value, exactMatch),
setToValue(((Conjunction) formula).getRight(), variable, value, exactMatch));
} else {
result = formula;
}
return result;
}
/**
* Simplifies boolean formulas a bit. The following simplification rules are done:
* <ul>
* <li>NOT(NOT(a)) -> a</li>
* <li>NOT(true) -> false</li>
* <li>NOT(false) -> true</li>
*
* <li>true OR a -> true</li>
* <li>a OR true -> true</li>
* <li>false OR false -> false</li>
* <li>a OR false -> a</li>
* <li>false OR a -> a</li>
* <li>a OR a -> a</li>
*
* <li>false AND a -> false</li>
* <li>a AND false -> false</li>
* <li>true AND true -> true</li>
* <li>a AND true -> a</li>
* <li>true AND a -> a</li>
* <li>a AND a -> a</li>
* </ul>
*
* TODO: move this to general utils.
*
* @param formula The formula to simplify.
* @return A new formula equal to the original, but simplified.
*/
private Formula simplify(Formula formula) {
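// Example of the rules above: simplify(NOT(NOT(a)) OR false) first reduces NOT(NOT(a)) to a
// and then drops the "OR false", leaving just a.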
Formula result;
if (formula instanceof Negation) {
Formula nested = simplify(((Negation) formula).getFormula());
if (nested instanceof Negation) {
result = ((Negation) nested).getFormula();
} else if (nested instanceof True) {
result = False.INSTANCE;
} else if (nested instanceof False) {
result = True.INSTANCE;
} else {
result = new Negation(nested);
}
} else if (formula instanceof Disjunction) {
Formula left = simplify(((Disjunction) formula).getLeft());
Formula right = simplify(((Disjunction) formula).getRight());
if (left instanceof True || right instanceof True) {
result = True.INSTANCE;
} else if (left instanceof False && right instanceof False) {
result = False.INSTANCE;
} else if (left instanceof False) {
result = right;
} else if (right instanceof False) {
result = left;
} else if (left.equals(right)) {
result = left;
} else {
result = new Disjunction(left, right);
}
} else if (formula instanceof Conjunction) {
Formula left = simplify(((Conjunction) formula).getLeft());
Formula right = simplify(((Conjunction) formula).getRight());
if (left instanceof False || right instanceof False) {
result = False.INSTANCE;
} else if (left instanceof True && right instanceof True) {
result = True.INSTANCE;
} else if (left instanceof True) {
result = right;
} else if (right instanceof True) {
result = left;
} else if (left.equals(right)) {
result = left;
} else {
result = new Conjunction(left, right);
}
} else {
result = formula;
}
return result;
}
/**
* Creates a feature effect for the given variable and its PCs.
* A feature effect is defined as:
* <code>Or over (for each PC in PCs ( PC[variable <- true] XOR PC[variable <- false] ))</code>.
*
*
* @param varWithPcs The variable and all presence conditions that the variable appears in.
* @return A formula representing the feature effect of the variable.
*/
private Formula buildFeatureEffefct(VariableWithPcs varWithPcs) {
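// Illustration of the definition above: for a single presence condition "A && B" and variable A,
// PC[A <- true] simplifies to B and PC[A <- false] to false, so their XOR - and thus the
// feature effect of A - is just B.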
String variable = varWithPcs.getVariable();
Collection<Formula> pcs = varWithPcs.getPcs();
boolean simplify = simplifyType.ordinal() >= SimplificationType.PRESENCE_CONDITIONS.ordinal();
// This eliminates "duplicated" formulas, this is not done in simplifications for presence conditions.
pcs = simplify ? FeatureEffectReducer.simpleReduce(variable, pcs) : pcs;
// Check if presence conditions have already been simplified in earlier step
if (simplifyType.ordinal() > SimplificationType.PRESENCE_CONDITIONS.ordinal()) {
// Simplification wasn't applied to separate presence conditions before, do this here
List<Formula> tmp = new ArrayList<>(pcs.size());
for (Formula formula : pcs) {
tmp.add(simplifier.simplify(formula));
}
pcs = tmp;
}
Formula result = createXorTree(variable, simplify, pcs);
if (helper.isNonBooleanReplacements()) {
int index = variable.indexOf("_eq_");
if (index != -1) {
String varBaseName = variable.substring(0, index);
result = setToValue(result, varBaseName + "_", false, false);
}
}
Formula simplifiedResult;
if (simplify) {
// Perform a simplification on the final result
simplifiedResult = simplifier.simplify(result);
} else {
// At least try to resolve all the (unnecessary) XORs
simplifiedResult = simplify(result);
}
return simplifiedResult;
}
/**
* Creates the disjunction of the XOR elements as needed by the Feature effect algorithm.
* @param variable The variable name for which we currently compute the feature effect.
* @param simplify <tt>true</tt> if the result should be simplified
* @param pcs The presence conditions relevant for the variable.
* @return The feature effect constraint (pre-condition).
*/
private Formula createXorTree(String variable, boolean simplify, Collection<Formula> pcs) {
DisjunctionQueue innerElements;
DisjunctionQueue xorTrees;
if (null != simplifier) {
innerElements = new DisjunctionQueue(true, f -> simplifier.simplify(f));
xorTrees = new DisjunctionQueue(simplify, f -> simplifier.simplify(f));
} else {
innerElements = new DisjunctionQueue(true);
xorTrees = new DisjunctionQueue(simplify);
}
for (Formula pc : pcs) {
// A xor B
// <==> (A || B) && (!A || !B)
Formula trueFormula = setToValue(pc, variable, true, true);
Formula falseFormula = setToValue(pc, variable, false, true);
// (A || B)
innerElements.add(trueFormula);
innerElements.add(falseFormula);
Formula atLeastOnePositive = innerElements.getDisjunction(variable);
// (!A || !B)
innerElements.add(new Negation(trueFormula));
innerElements.add(new Negation(falseFormula));
Formula atLeastOneNegative = innerElements.getDisjunction(variable);
Formula xor;
if (atLeastOnePositive == null && atLeastOneNegative != null) {
xor = atLeastOneNegative;
} else if (atLeastOnePositive != null && atLeastOneNegative == null) {
xor = atLeastOnePositive;
} else {
xor = new Conjunction(atLeastOnePositive, atLeastOneNegative);
}
xorTrees.add(xor);
}
Formula result = xorTrees.getDisjunction(variable);
return result;
}
@Override
public String getResultName() {
return "Feature Effects";
}
}
| src/net/ssehub/kernel_haven/feature_effects/FeatureEffectFinder.java | package net.ssehub.kernel_haven.feature_effects;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import net.ssehub.kernel_haven.SetUpException;
import net.ssehub.kernel_haven.analysis.AnalysisComponent;
import net.ssehub.kernel_haven.config.Configuration;
import net.ssehub.kernel_haven.feature_effects.FeatureEffectFinder.VariableWithFeatureEffect;
import net.ssehub.kernel_haven.feature_effects.PcFinder.VariableWithPcs;
import net.ssehub.kernel_haven.feature_effects.Settings.SimplificationType;
import net.ssehub.kernel_haven.util.io.TableElement;
import net.ssehub.kernel_haven.util.io.TableRow;
import net.ssehub.kernel_haven.util.logic.Conjunction;
import net.ssehub.kernel_haven.util.logic.Disjunction;
import net.ssehub.kernel_haven.util.logic.DisjunctionQueue;
import net.ssehub.kernel_haven.util.logic.False;
import net.ssehub.kernel_haven.util.logic.Formula;
import net.ssehub.kernel_haven.util.logic.Negation;
import net.ssehub.kernel_haven.util.logic.True;
import net.ssehub.kernel_haven.util.logic.Variable;
/**
* A component that finds feature effects for variables.
*
* @author Adam
*/
public class FeatureEffectFinder extends AnalysisComponent<VariableWithFeatureEffect> {
/**
* A variable together with its feature effect formula.
*
* @author Adam
*/
@TableRow
public static class VariableWithFeatureEffect {
private String variable;
private Formula featureEffect;
/**
* Creates a new feature effect result.
*
* @param variable The variable name.
* @param featureEffect The feature effect of the given variable. Must not be <code>null</code>.
*/
public VariableWithFeatureEffect(String variable, Formula featureEffect) {
this.variable = variable;
this.featureEffect = featureEffect;
}
/**
* Returns the variable name.
*
* @return The name of the variable.
*/
@TableElement(name = "Variable", index = 0)
public String getVariable() {
return variable;
}
/**
* Returns the feature effect formula for this variable.
*
* @return The feature effect, never <code>null</code>.
*/
@TableElement(name = "Feature Effect", index = 1)
public Formula getFeatureEffect() {
return featureEffect;
}
@Override
public String toString() {
return "FeatureEffect[" + variable + "] = " + featureEffect.toString();
}
}
private AnalysisComponent<VariableWithPcs> pcFinder;
private PresenceConditionAnalysisHelper helper;
private SimplificationType simplifyType;
private FormulaSimplifier simplifier = null;
/**
* Creates a new {@link FeatureEffectFinder} for the given PC finder.
*
* @param config The global configuration.
* @param pcFinder The component to get the PCs from.
*
* @throws SetUpException If creating this component fails.
*/
public FeatureEffectFinder(Configuration config, AnalysisComponent<VariableWithPcs> pcFinder)
throws SetUpException {
super(config);
this.pcFinder = pcFinder;
this.helper = new PresenceConditionAnalysisHelper(config);
simplifyType = helper.getSimplificationMode();
if (simplifyType.ordinal() >= SimplificationType.PRESENCE_CONDITIONS.ordinal()) {
// Will throw an exception if CNF Utils are not present (but was selected by user in configuration file)
simplifier = new FormulaSimplifier();
}
}
@Override
protected void execute() {
VariableWithPcs pcs;
while ((pcs = pcFinder.getNextResult()) != null) {
if (helper.isRelevant(pcs.getVariable())) {
addResult(new VariableWithFeatureEffect(
helper.doReplacements(pcs.getVariable()),
helper.doReplacements(buildFeatureEffefct(pcs))
));
}
}
}
/**
* Replaces each occurrence of a variable with a constant.
*
* TODO: move this to general utils.
*
* @param formula The formula to replace the variable in; this formula is not altered.
* @param variable The variable to replace.
* @param value Which constant the variable should be replaced with.
* @param exactMatch Whether the variable name has to match exactly. If <code>false</code>, then startsWith()
* is used to find matches to replace.
*
* @return A new Formula equal to the given formula, but with each occurrence of the variable replaced.
*/
private Formula setToValue(Formula formula, String variable, boolean value, boolean exactMatch) {
Formula result;
if (formula instanceof Variable) {
Variable var = (Variable) formula;
boolean replace;
if (exactMatch) {
replace = var.getName().equals(variable);
} else {
replace = var.getName().startsWith(variable);
}
if (replace) {
result = (value ? True.INSTANCE : False.INSTANCE);
} else {
result = var;
}
} else if (formula instanceof Negation) {
result = new Negation(setToValue(((Negation) formula).getFormula(), variable, value, exactMatch));
} else if (formula instanceof Disjunction) {
result = new Disjunction(
setToValue(((Disjunction) formula).getLeft(), variable, value, exactMatch),
setToValue(((Disjunction) formula).getRight(), variable, value, exactMatch));
} else if (formula instanceof Conjunction) {
result = new Conjunction(
setToValue(((Conjunction) formula).getLeft(), variable, value, exactMatch),
setToValue(((Conjunction) formula).getRight(), variable, value, exactMatch));
} else {
result = formula;
}
return result;
}
/**
* Simplifies boolean formulas a bit. The following simplification rules are done:
* <ul>
* <li>NOT(NOT(a)) -> a</li>
* <li>NOT(true) -> false</li>
* <li>NOT(false) -> true</li>
*
* <li>true OR a -> true</li>
* <li>a OR true -> true</li>
* <li>false OR false -> false</li>
* <li>a OR false -> a</li>
* <li>false OR a -> a</li>
* <li>a OR a -> a</li>
*
* <li>false AND a -> false</li>
* <li>a AND false -> false</li>
* <li>true AND true -> true</li>
* <li>a AND true -> a</li>
* <li>true AND a -> a</li>
* <li>a AND a -> a</li>
* </ul>
*
* TODO: move this to general utils.
*
* @param formula The formula to simplify.
* @return A new formula equal to the original, but simplified.
*/
private Formula simplify(Formula formula) {
Formula result;
if (formula instanceof Negation) {
Formula nested = simplify(((Negation) formula).getFormula());
if (nested instanceof Negation) {
result = ((Negation) nested).getFormula();
} else if (nested instanceof True) {
result = False.INSTANCE;
} else if (nested instanceof False) {
result = True.INSTANCE;
} else {
result = new Negation(nested);
}
} else if (formula instanceof Disjunction) {
Formula left = simplify(((Disjunction) formula).getLeft());
Formula right = simplify(((Disjunction) formula).getRight());
if (left instanceof True || right instanceof True) {
result = True.INSTANCE;
} else if (left instanceof False && right instanceof False) {
result = False.INSTANCE;
} else if (left instanceof False) {
result = right;
} else if (right instanceof False) {
result = left;
} else if (left.equals(right)) {
result = left;
} else {
result = new Disjunction(left, right);
}
} else if (formula instanceof Conjunction) {
Formula left = simplify(((Conjunction) formula).getLeft());
Formula right = simplify(((Conjunction) formula).getRight());
if (left instanceof False || right instanceof False) {
result = False.INSTANCE;
} else if (left instanceof True && right instanceof True) {
result = True.INSTANCE;
} else if (left instanceof True) {
result = right;
} else if (right instanceof True) {
result = left;
} else if (left.equals(right)) {
result = left;
} else {
result = new Conjunction(left, right);
}
} else {
result = formula;
}
return result;
}
/**
* Creates a feature effect for the given variable and its PCs.
* A feature effect is defined as:
* <code>Or over (for each PC in PCs ( PC[variable <- true] XOR PC[variable <- false] ))</code>.
*
*
* @param varWithPcs The variable and all presence conditions that the variable appears in.
* @return A formula representing the feature effect of the variable.
*/
private Formula buildFeatureEffefct(VariableWithPcs varWithPcs) {
String variable = varWithPcs.getVariable();
Collection<Formula> pcs = varWithPcs.getPcs();
boolean simplify = simplifyType.ordinal() >= SimplificationType.PRESENCE_CONDITIONS.ordinal();
// This eliminates "duplicated" formulas, this is not done in simplifications for presence conditions.
if (simplifyType.ordinal() > SimplificationType.PRESENCE_CONDITIONS.ordinal()) {
// Simplification wasn't applied to separate presence conditions before, do this here
List<Formula> tmp = new ArrayList<>(pcs.size());
for (Formula formula : pcs) {
tmp.add(simplifier.simplify(formula));
}
pcs = tmp;
}
Collection<Formula> filteredFormula = simplify ? FeatureEffectReducer.simpleReduce(variable, pcs) : pcs;
Formula result = createXorTree(variable, simplify, filteredFormula);
if (helper.isNonBooleanReplacements()) {
int index = variable.indexOf("_eq_");
if (index != -1) {
String varBaseName = variable.substring(0, index);
result = setToValue(result, varBaseName + "_", false, false);
}
}
Formula simplifiedResult;
if (simplify) {
// Perform a simplification on the final result
simplifiedResult = simplifier.simplify(result);
} else {
// At least try to resolve all the (unnecessary) XORs
simplifiedResult = simplify(result);
}
return simplifiedResult;
}
/**
* Creates the disjunction of the XOR elements as needed by the Feature effect algorithm.
* @param variable The variable name for which we currently compute the feature effect.
* @param simplify <tt>true</tt> if the result should be simplified
* @param pcs The presence conditions relevant for the variable.
* @return The feature effect constraint (pre-condition).
*/
private Formula createXorTree(String variable, boolean simplify, Collection<Formula> pcs) {
DisjunctionQueue innerElements;
DisjunctionQueue xorTrees;
if (null != simplifier) {
innerElements = new DisjunctionQueue(true, f -> simplifier.simplify(f));
xorTrees = new DisjunctionQueue(simplify, f -> simplifier.simplify(f));
} else {
innerElements = new DisjunctionQueue(true);
xorTrees = new DisjunctionQueue(simplify);
}
for (Formula pc : pcs) {
// A xor B
// <==> (A || B) && (!A || !B)
Formula trueFormula = setToValue(pc, variable, true, true);
Formula falseFormula = setToValue(pc, variable, false, true);
// (A || B)
innerElements.add(trueFormula);
innerElements.add(falseFormula);
Formula atLeastOnePositive = innerElements.getDisjunction(variable);
// (!A || !B)
innerElements.add(new Negation(trueFormula));
innerElements.add(new Negation(falseFormula));
Formula atLeastOneNegative = innerElements.getDisjunction(variable);
Formula xor;
if (atLeastOnePositive == null && atLeastOneNegative != null) {
xor = atLeastOneNegative;
} else if (atLeastOnePositive != null && atLeastOneNegative == null) {
xor = atLeastOnePositive;
} else {
xor = new Conjunction(atLeastOnePositive, atLeastOneNegative);
}
xorTrees.add(xor);
}
Formula result = xorTrees.getDisjunction(variable);
return result;
}
@Override
public String getResultName() {
return "Feature Effects";
}
}
| Comments & minimalistic refactoring | src/net/ssehub/kernel_haven/feature_effects/FeatureEffectFinder.java | Comments & minimalistic refactoring | <ide><path>rc/net/ssehub/kernel_haven/feature_effects/FeatureEffectFinder.java
<ide> boolean simplify = simplifyType.ordinal() >= SimplificationType.PRESENCE_CONDITIONS.ordinal();
<ide>
<ide> // This eliminates "duplicated" formulas, this is not done in simplifications for presence conditions.
<add> pcs = simplify ? FeatureEffectReducer.simpleReduce(variable, pcs) : pcs;
<add>
<add> // Check if presence conditions have already been simplified in earlier step
<ide> if (simplifyType.ordinal() > SimplificationType.PRESENCE_CONDITIONS.ordinal()) {
<ide> // Simplification wasn't applied to separate presence conditions before, do this here
<ide> List<Formula> tmp = new ArrayList<>(pcs.size());
<ide> }
<ide> pcs = tmp;
<ide> }
<del> Collection<Formula> filteredFormula = simplify ? FeatureEffectReducer.simpleReduce(variable, pcs) : pcs;
<del>
<del> Formula result = createXorTree(variable, simplify, filteredFormula);
<add>
<add> Formula result = createXorTree(variable, simplify, pcs);
<ide> if (helper.isNonBooleanReplacements()) {
<ide> int index = variable.indexOf("_eq_");
<ide> |
|
Java | apache-2.0 | 0f3a895c86dfce538d5f32c5306f6493d74407da | 0 | SpineEventEngine/gae-java,SpineEventEngine/gae-java | /*
* Copyright 2019, TeamDev. All rights reserved.
*
* Redistribution and use in source and/or binary forms, with or without
* modification, must retain the above copyright notice and the following
* disclaimer.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package io.spine.server.storage.datastore;
import com.google.cloud.datastore.Key;
import com.google.cloud.datastore.StructuredQuery;
import com.google.common.truth.IterableSubject;
import com.google.protobuf.Any;
import com.google.protobuf.FieldMask;
import com.google.protobuf.Message;
import com.google.protobuf.Timestamp;
import io.spine.client.CompositeFilter;
import io.spine.client.IdFilter;
import io.spine.client.TargetFilters;
import io.spine.core.Version;
import io.spine.core.Versions;
import io.spine.protobuf.AnyPacker;
import io.spine.server.entity.Entity;
import io.spine.server.entity.EntityRecord;
import io.spine.server.entity.LifecycleFlags;
import io.spine.server.entity.storage.EntityQueries;
import io.spine.server.entity.storage.EntityQuery;
import io.spine.server.entity.storage.EntityRecordWithColumns;
import io.spine.server.storage.RecordReadRequest;
import io.spine.server.storage.RecordStorage;
import io.spine.server.storage.RecordStorageTest;
import io.spine.server.storage.datastore.given.CollegeEntity;
import io.spine.server.storage.datastore.given.DsRecordStorageTestEnv;
import io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.EntityWithCustomColumnName;
import io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.TestEntity;
import io.spine.server.storage.datastore.given.TestConstCounterEntity;
import io.spine.server.storage.given.RecordStorageTestEnv.TestCounterEntity;
import io.spine.test.datastore.College;
import io.spine.test.datastore.CollegeId;
import io.spine.test.storage.Project;
import io.spine.test.storage.ProjectId;
import io.spine.test.storage.Task;
import io.spine.type.TypeUrl;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Lists.reverse;
import static com.google.common.truth.Truth.assertThat;
import static com.google.protobuf.util.Timestamps.toSeconds;
import static io.spine.base.Time.getCurrentTime;
import static io.spine.client.Filters.all;
import static io.spine.client.Filters.either;
import static io.spine.client.Filters.eq;
import static io.spine.client.Filters.gt;
import static io.spine.client.Filters.lt;
import static io.spine.json.Json.toCompactJson;
import static io.spine.protobuf.AnyPacker.pack;
import static io.spine.protobuf.AnyPacker.unpack;
import static io.spine.server.entity.FieldMasks.applyMask;
import static io.spine.server.entity.storage.EntityRecordWithColumns.create;
import static io.spine.server.storage.datastore.given.CollegeEntity.CollegeColumn.ADMISSION_DEADLINE;
import static io.spine.server.storage.datastore.given.CollegeEntity.CollegeColumn.CREATED;
import static io.spine.server.storage.datastore.given.CollegeEntity.CollegeColumn.NAME;
import static io.spine.server.storage.datastore.given.CollegeEntity.CollegeColumn.PASSING_GRADE;
import static io.spine.server.storage.datastore.given.CollegeEntity.CollegeColumn.STATE_SPONSORED;
import static io.spine.server.storage.datastore.given.CollegeEntity.CollegeColumn.STUDENT_COUNT;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.COLUMN_NAME_FOR_STORING;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.UNORDERED_COLLEGE_NAMES;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.ascendingBy;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.assertSortedBooleans;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.combine;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.createAndStoreEntities;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.createAndStoreEntitiesWithNullStudentCount;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.createAndStoreEntity;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.datastoreFactory;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.descendingBy;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.emptyFieldMask;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.emptyFilters;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.emptyIdFilter;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.emptyOrderBy;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.emptyPagination;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.getStateSponsoredValues;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.newCollegeId;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.newEntityRecord;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.newIdFilter;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.newTargetFilters;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.nullableStudentCount;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.pagination;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.recordIds;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.sortedIds;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.sortedValues;
import static java.util.Collections.singletonList;
import static java.util.stream.Collectors.toList;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyIterable;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
@DisplayName("DsRecordStorage should")
class DsRecordStorageTest extends RecordStorageTest<DsRecordStorage<ProjectId>> {
private final TestDatastoreStorageFactory datastoreFactory = datastoreFactory();
@SuppressWarnings("unchecked") // OK for tests.
@Override
protected DsRecordStorage<ProjectId> newStorage(Class<? extends Entity> entityClass) {
Class<? extends Entity<ProjectId, ?>> cls =
(Class<? extends Entity<ProjectId, ?>>) entityClass;
return (DsRecordStorage<ProjectId>) datastoreFactory.createRecordStorage(cls);
}
@Override
protected Class<? extends TestCounterEntity> getTestEntityClass() {
return TestEntity.class;
}
@Override
protected Message newState(ProjectId projectId) {
Project project = Project
.newBuilder()
.setId(projectId)
.setName("Some test name")
.addTask(Task.getDefaultInstance())
.setStatus(Project.Status.CREATED)
.build();
return project;
}
private EntityRecordWithColumns newRecordWithColumns(RecordStorage<ProjectId> storage) {
EntityRecord record = newStorageRecord();
Entity<ProjectId, Project> entity = TestConstCounterEntity.create(newId());
EntityRecordWithColumns recordWithColumns = create(record, entity, storage);
return recordWithColumns;
}
@BeforeEach
void setUp() {
datastoreFactory.setUp();
}
@AfterEach
void tearDown() {
datastoreFactory.tearDown();
}
@Test
@DisplayName("provide access to DatastoreWrapper for extensibility")
void testAccessDatastoreWrapper() {
DsRecordStorage<ProjectId> storage = getStorage();
DatastoreWrapper datastore = storage.getDatastore();
assertNotNull(datastore);
}
@Test
@DisplayName("provide access to TypeUrl for extensibility")
void testAccessTypeUrl() {
DsRecordStorage<ProjectId> storage = getStorage();
TypeUrl typeUrl = storage.getTypeUrl();
assertNotNull(typeUrl);
// According to the `TestConstCounterEntity` declaration.
assertEquals(TypeUrl.of(Project.class), typeUrl);
}
@SuppressWarnings("OverlyLongMethod")
// A complicated test case verifying correct Datastore behavior at the low level of
// DatastoreWrapper and Datastore Entity.
// Additionally checks the standard predefined Datastore column types.
@Test
@DisplayName("persist entity columns beside the corresponding record")
void testPersistColumns() {
String counter = "counter";
String bigCounter = "bigCounter";
String counterEven = "counterEven";
String counterVersion = "counterVersion";
@SuppressWarnings("DuplicateStringLiteralInspection") // common column name
String creationTime = "creationTime";
String counterState = "counterState";
String version = "version";
String archived = "archived";
String deleted = "deleted";
ProjectId id = newId();
Project state = (Project) newState(id);
Version versionValue = Versions.newVersion(5, getCurrentTime());
TestConstCounterEntity entity = TestConstCounterEntity.create(id, state);
EntityRecord record = EntityRecord.newBuilder()
.setState(pack(state))
.setEntityId(pack(id))
.setVersion(versionValue)
.build();
DsRecordStorage<ProjectId> storage = newStorage(TestConstCounterEntity.class);
EntityRecordWithColumns recordWithColumns = create(record, entity, storage);
Collection<String> columns = recordWithColumns.getColumnNames();
assertNotNull(columns);
IterableSubject assertColumns = assertThat(columns);
// Custom Columns
assertColumns.contains(counter);
assertColumns.contains(bigCounter);
assertColumns.contains(counterEven);
assertColumns.contains(counterVersion);
assertColumns.contains(creationTime);
assertColumns.contains(counterState);
// Columns defined in superclasses
assertColumns.contains(version);
assertColumns.contains(archived);
assertColumns.contains(deleted);
// High level write operation
storage.write(id, recordWithColumns);
// Read Datastore Entity
DatastoreWrapper datastore = storage.getDatastore();
Key key = datastore.keyFor(
Kind.of(state),
RecordId.ofEntityId(id));
com.google.cloud.datastore.Entity datastoreEntity = datastore.read(key);
// Check entity record
TypeUrl recordType = TypeUrl.from(EntityRecord.getDescriptor());
EntityRecord readRecord = Entities.toMessage(datastoreEntity, recordType);
assertEquals(record, readRecord);
// Check custom Columns
assertEquals(entity.getCounter(), datastoreEntity.getLong(counter));
assertEquals(entity.getBigCounter(), datastoreEntity.getLong(bigCounter));
assertEquals(entity.getCounterVersion()
.getNumber(), datastoreEntity.getLong(counterVersion));
com.google.cloud.Timestamp actualCreationTime =
datastoreEntity.getTimestamp(creationTime);
assertEquals(toSeconds(entity.getCreationTime()), actualCreationTime.getSeconds());
assertEquals(entity.getCreationTime()
.getNanos(), actualCreationTime.getNanos());
assertEquals(entity.isCounterEven(), datastoreEntity.getBoolean(counterEven));
assertEquals(toCompactJson(entity.getCounterState()),
datastoreEntity.getString(counterState));
// Check standard Columns
assertEquals(entity.getVersion()
.getNumber(), datastoreEntity.getLong(version));
assertEquals(entity.isArchived(), datastoreEntity.getBoolean(archived));
assertEquals(entity.isDeleted(), datastoreEntity.getBoolean(deleted));
}
@Test
@DisplayName("pass big data speed test")
void testBigData() {
// The default bulk size is 500 records, i.e. the maximum number of records that can be
// written within one write operation.
long maxReadTime = 1000;
long maxWriteTime = 9500;
DsRecordStorage<ProjectId> storage = newStorage(TestConstCounterEntity.class);
BigDataTester.<ProjectId>newBuilder()
.setEntryFactory(new BigDataTester.EntryFactory<ProjectId>() {
@Override
public ProjectId newId() {
return DsRecordStorageTest.this.newId();
}
@Override
public EntityRecordWithColumns newRecord() {
return DsRecordStorageTest.this.newRecordWithColumns(storage);
}
})
.setReadLimit(maxReadTime)
.setWriteLimit(maxWriteTime)
.build()
.testBigDataOperations(storage);
}
@Test
@DisplayName("write and read records with lifecycle flags by ID")
void testLifecycleFlags() {
ProjectId id = newId();
LifecycleFlags lifecycle = LifecycleFlags
.newBuilder()
.setArchived(true)
.build();
EntityRecord record = EntityRecord
.newBuilder()
.setState(pack(newState(id)))
.setLifecycleFlags(lifecycle)
.setEntityId(pack(id))
.build();
TestConstCounterEntity entity = TestConstCounterEntity.create(id);
entity.injectLifecycle(lifecycle);
RecordStorage<ProjectId> storage = newStorage(TestConstCounterEntity.class);
EntityRecordWithColumns recordWithColumns = create(record, entity, storage);
storage.write(id, recordWithColumns);
RecordReadRequest<ProjectId> request = new RecordReadRequest<>(id);
Optional<EntityRecord> restoredRecordOptional = storage.read(request);
assertTrue(restoredRecordOptional.isPresent());
// Includes Lifecycle flags comparison
EntityRecord restoredRecord = restoredRecordOptional.get();
assertEquals(record, restoredRecord);
}
@Test
@DisplayName("convert entity record to entity using column name for storing")
void testUseColumnStoreName() {
DsRecordStorage<ProjectId> storage = newStorage(EntityWithCustomColumnName.class);
ProjectId id = newId();
EntityRecord record = newEntityRecord(id, newState(id));
Entity entity = new EntityWithCustomColumnName(id);
EntityRecordWithColumns entityRecordWithColumns = create(record, entity, storage);
com.google.cloud.datastore.Entity datastoreEntity =
storage.entityRecordToEntity(id, entityRecordWithColumns);
Set<String> propertiesName = datastoreEntity.getNames();
assertTrue(propertiesName.contains(COLUMN_NAME_FOR_STORING));
}
@Nested
@DisplayName("lookup Datastore records by IDs")
class LookupByIds {
private DatastoreStorageFactory storageFactory;
private RecordStorage<CollegeId> storage;
@BeforeEach
void setUp() {
SpyStorageFactory.injectWrapper(datastoreFactory().getDatastore());
storageFactory = new SpyStorageFactory();
storage = storageFactory.createRecordStorage(CollegeEntity.class);
}
@Test
@DisplayName("returning proper entity")
void testQueryByIDs() {
// Create 10 entities and pick one for tests.
int recordCount = 10;
int targetEntityIndex = 7;
List<CollegeEntity> entities = createAndStoreEntities(storage, recordCount);
CollegeEntity targetEntity = entities.get(targetEntityIndex);
// Create ID filter.
Any targetId = pack(targetEntity.getId());
IdFilter idFilter = newIdFilter(targetId);
// Create column filter.
Timestamp targetColumnValue = targetEntity.getCreationTime();
CompositeFilter columnFilter = all(eq(CREATED.columnName(), targetColumnValue));
// Compose Query filters.
TargetFilters entityFilters = newTargetFilters(idFilter, columnFilter);
// Compose Query.
EntityQuery<CollegeId> entityQuery =
EntityQueries.from(entityFilters, emptyOrderBy(), emptyPagination(), storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(1, resultList.size());
// Check the record state.
EntityRecord record = resultList.get(0);
assertEquals(targetEntity.getState(), unpack(record.getState()));
assertDsReadByKeys();
}
@Test
@DisplayName("in descending sort order")
void testQueryByIDsWithDescendingOrder() {
// Create entities.
int recordCount = UNORDERED_COLLEGE_NAMES.size();
List<CollegeEntity> entities = createAndStoreEntities(storage, UNORDERED_COLLEGE_NAMES);
// Create ID filter.
List<Any> targetIds = idsAsAny(entities);
IdFilter idFilter = newIdFilter(targetIds);
// Compose Query filters.
TargetFilters entityFilters = newTargetFilters(idFilter);
// Compose Query.
EntityQuery<CollegeId> entityQuery = EntityQueries.from(entityFilters,
descendingBy(NAME),
emptyPagination(),
storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(recordCount, resultList.size());
// Check the entities were ordered.
List<CollegeId> expectedResults = reverse(sortedIds(entities, CollegeEntity::getName));
List<CollegeId> actualResults = recordIds(resultList);
assertEquals(expectedResults, actualResults);
assertDsReadByKeys();
}
@Test
@DisplayName("in an order specified by string field")
void testQueryByIDsWithOrderByString() {
testOrdering(NAME, CollegeEntity::getName);
}
@Test
@DisplayName("in order specified by double field")
void testQueryByIDsWithOrderByDouble() {
testOrdering(PASSING_GRADE, CollegeEntity::getPassingGrade);
}
@Test
@DisplayName("in order specified by timestamp field")
void testQueryByIDsWithOrderByTimestamp() {
testOrdering(ADMISSION_DEADLINE, entity -> entity.getAdmissionDeadline()
.getSeconds());
}
@Test
@DisplayName("in an order specified by integer")
void testQueryByIDsWithOrderByInt() {
testOrdering(STUDENT_COUNT, CollegeEntity::getStudentCount);
}
/**
* Uses the local {@link SpyStorageFactory}, so it cannot be moved to the test environment.
*/
private <T extends Comparable<T>> void
testOrdering(CollegeEntity.CollegeColumn column, Function<CollegeEntity, T> property) {
// Create entities.
int expectedRecordCount = UNORDERED_COLLEGE_NAMES.size() - 2;
List<CollegeEntity> entities =
createAndStoreEntities(storage, UNORDERED_COLLEGE_NAMES)
.subList(0, expectedRecordCount);
// Create ID filter.
List<Any> targetIds = idsAsAny(entities);
IdFilter idFilter = newIdFilter(targetIds);
// Compose Query filters.
TargetFilters entityFilters = newTargetFilters(idFilter);
// Compose Query.
EntityQuery<CollegeId> entityQuery = EntityQueries.from(entityFilters,
ascendingBy(column),
emptyPagination(),
storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(expectedRecordCount, resultList.size());
// Check the entities were ordered.
List<CollegeId> expectedResults = sortedIds(entities, property);
List<CollegeId> actualResults = recordIds(resultList);
assertEquals(expectedResults, actualResults);
assertDsReadByKeys();
}
private List<Any> idsAsAny(List<CollegeEntity> entities) {
return entities.stream()
.map(Entity::getId)
.map(AnyPacker::pack)
.collect(toList());
}
@Test
@DisplayName("in an order specified by boolean")
void testQueryByIDsWithOrderByBoolean() {
// Create entities.
int recordCount = 20;
List<CollegeEntity> entities = createAndStoreEntities(storage, recordCount);
// Create ID filter.
List<Any> targetIds = idsAsAny(entities);
IdFilter idFilter = newIdFilter(targetIds);
// Compose Query filters.
TargetFilters entityFilters = newTargetFilters(idFilter);
// Compose Query.
EntityQuery<CollegeId> entityQuery = EntityQueries.from(entityFilters,
ascendingBy(STATE_SPONSORED),
emptyPagination(),
storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(recordCount, resultList.size());
// Check the entities were ordered.
List<Boolean> actualResults = getStateSponsoredValues(resultList);
assertSortedBooleans(actualResults);
assertDsReadByKeys();
}
@Test
@DisplayName("in specified order with nulls")
void testQueryByIDsWithOrderWithNulls() {
// Create entities.
int nullCount = 11;
int regularCount = 37;
int recordCount = regularCount + nullCount;
List<CollegeEntity> nullEntities =
createAndStoreEntitiesWithNullStudentCount(storage, nullCount);
List<CollegeEntity> regularEntities = createAndStoreEntities(storage, regularCount);
List<CollegeEntity> entities = combine(nullEntities, regularEntities);
// Create ID filter.
List<Any> targetIds = idsAsAny(entities);
IdFilter idFilter = newIdFilter(targetIds);
// Compose Query filters.
TargetFilters entityFilters = newTargetFilters(idFilter);
// Compose Query.
EntityQuery<CollegeId> entityQuery = EntityQueries.from(entityFilters,
ascendingBy(STUDENT_COUNT),
emptyPagination(),
storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(recordCount, resultList.size());
// Check the entities were ordered.
List<Integer> expectedCounts = sortedValues(entities, CollegeEntity::getStudentCount);
List<Integer> actualCounts = nullableStudentCount(resultList);
assertEquals(expectedCounts, actualCounts);
// Check Datastore reads are performed by keys but not using a structured query.
DatastoreWrapper spy = storageFactory.getDatastore();
verify(spy).read(anyIterable());
//noinspection unchecked OK for a generic class assignment in tests.
verify(spy, never()).read(any(StructuredQuery.class));
}
@Test
@DisplayName("in specified order with missing entities")
void testQueryByIDsWithOrderWithMissingEntities() {
// Create entities.
int recordCount = 12;
List<CollegeEntity> entities = createAndStoreEntities(storage, recordCount);
// Create ID filter.
List<Any> targetIds = idsAsAny(entities);
targetIds.add(2, pack(newCollegeId()));
targetIds.add(5, pack(newCollegeId()));
targetIds.add(7, pack(newCollegeId()));
IdFilter idFilter = newIdFilter(targetIds);
// Compose Query filters.
TargetFilters entityFilters = newTargetFilters(idFilter);
// Compose Query.
EntityQuery<CollegeId> entityQuery = EntityQueries.from(entityFilters,
ascendingBy(STUDENT_COUNT),
emptyPagination(),
storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(recordCount, resultList.size());
// Check the entities were ordered.
List<CollegeId> expectedResults = sortedIds(entities, CollegeEntity::getStudentCount);
List<CollegeId> actualResults = recordIds(resultList);
assertEquals(expectedResults, actualResults);
assertDsReadByKeys();
}
@Test
@DisplayName("a specified number of entities")
void testQueryByIDsWithLimit() {
// Create entities.
int expectedRecordCount = 4;
List<CollegeEntity> entities = createAndStoreEntities(storage, UNORDERED_COLLEGE_NAMES);
// Create ID filter.
List<Any> targetIds = idsAsAny(entities);
IdFilter idFilter = newIdFilter(targetIds);
// Compose Query filters.
TargetFilters entityFilters = newTargetFilters(idFilter);
// Compose Query.
EntityQuery<CollegeId> entityQuery = EntityQueries.from(entityFilters,
ascendingBy(NAME),
pagination(expectedRecordCount),
storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(expectedRecordCount, resultList.size());
// Check the entities were ordered.
List<CollegeId> sortedIds = sortedIds(entities, CollegeEntity::getName);
List<CollegeId> expectedResults = sortedIds.subList(0, expectedRecordCount);
List<CollegeId> actualResults = recordIds(resultList);
assertEquals(expectedResults, actualResults);
assertDsReadByKeys();
}
@Test
@DisplayName("for entities without lifecycle")
void testQueryEntityWithoutLifecycleById() {
DsRecordStorage<ProjectId> storage = newStorage(EntityWithCustomColumnName.class);
ProjectId id = newId();
EntityRecord record = newEntityRecord(id, newState(id));
EntityWithCustomColumnName entity = new EntityWithCustomColumnName(id);
storage.writeRecord(entity.getId(), create(record, entity, storage));
// Create ID filter.
List<Any> targetIds = singletonList(pack(entity.getId()));
IdFilter idFilter = newIdFilter(targetIds);
// Compose Query filters.
TargetFilters entityFilters = newTargetFilters(idFilter);
// Compose Query.
EntityQuery<ProjectId> entityQuery = EntityQueries.from(entityFilters,
emptyOrderBy(),
emptyPagination(),
storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
assertEquals(record, readResult.next());
assertFalse(readResult.hasNext());
}
private void assertDsReadByKeys() {
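// Datastore must have been read by keys only, never via a structured query.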
DatastoreWrapper spy = storageFactory.getDatastore();
verify(spy).read(anyIterable());
//noinspection unchecked OK for a generic class assignment in tests.
verify(spy, never()).read(any(StructuredQuery.class));
}
}
/**
* Overrides and disables test from parent: {@link RecordStorageTest#rewritingExisting()}.
*/
@Test
@DisplayName("given bulk of records, write them re-writing existing ones")
void rewritingExisting() {
}
@Nested
@DisplayName("lookup records in Datastore by columns")
class LookupByQueries {
private DatastoreStorageFactory storageFactory;
private RecordStorage<CollegeId> storage;
@BeforeEach
void setUp() {
SpyStorageFactory.injectWrapper(datastoreFactory().getDatastore());
storageFactory = new SpyStorageFactory();
storage = storageFactory.createRecordStorage(CollegeEntity.class);
}
@Test
@DisplayName("returning proper entity for single column")
void testQueryByColumn() {
// Create 10 entities and pick one for tests.
int recordCount = 10;
int targetEntityIndex = 7;
List<CollegeEntity> entities = createAndStoreEntities(storage, recordCount);
CollegeEntity targetEntity = entities.get(targetEntityIndex);
// Create column filter.
String targetColumnValue = targetEntity.getName();
CompositeFilter columnFilter = all(eq(NAME.columnName(), targetColumnValue));
// Compose Query filters.
TargetFilters entityFilters = newTargetFilters(emptyIdFilter(),
columnFilter);
// Compose Query.
EntityQuery<CollegeId> entityQuery =
EntityQueries.from(entityFilters, emptyOrderBy(), emptyPagination(), storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(1, resultList.size());
// Check the record state.
EntityRecord record = resultList.get(0);
assertEquals(targetEntity.getState(), unpack(record.getState()));
assertDsReadByStructuredQuery();
}
@Test
@DisplayName("returning proper entity for multiple columns")
void testQueryByMultipleColumns() {
// Create 10 entities and pick one for tests.
int recordCount = 10;
int targetEntityIndex = 7;
List<CollegeEntity> entities = createAndStoreEntities(storage, recordCount);
CollegeEntity targetEntity = entities.get(targetEntityIndex);
// Create column filter.
CompositeFilter columnFilter = all(
eq(NAME.columnName(), targetEntity.getName()),
eq(CREATED.columnName(), targetEntity.getCreationTime())
);
TargetFilters entityFilters = newTargetFilters(emptyIdFilter(),
columnFilter);
// Compose Query.
EntityQuery<CollegeId> entityQuery =
EntityQueries.from(entityFilters, emptyOrderBy(), emptyPagination(), storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(1, resultList.size());
// Check the record state.
EntityRecord record = resultList.get(0);
assertEquals(targetEntity.getState(), unpack(record.getState()));
assertDsReadByStructuredQuery();
}
@Test
@DisplayName("with masked state")
void testFieldMaskApplied() {
// Create 10 entities and pick one for tests.
int recordCount = 10;
int targetEntityIndex = 7;
List<CollegeEntity> entities = createAndStoreEntities(storage, recordCount);
CollegeEntity targetEntity = entities.get(targetEntityIndex);
// Create column filter.
CompositeFilter columnFilter = all(
eq(NAME.columnName(), targetEntity.getName()),
eq(CREATED.columnName(), targetEntity.getCreationTime())
);
TargetFilters entityFilters = newTargetFilters(emptyIdFilter(),
columnFilter);
// Compose Query.
EntityQuery<CollegeId> query =
EntityQueries.from(entityFilters, emptyOrderBy(), emptyPagination(), storage);
// Execute Query.
FieldMask mask = DsRecordStorageTestEnv.newFieldMask("id", "name");
Iterator<EntityRecord> readResult = storage.readAll(query, mask);
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(1, resultList.size());
// Check the record state.
EntityRecord record = resultList.get(0);
College expectedState = applyMask(mask, targetEntity.getState());
College actualState = (College) unpack(record.getState());
assertNotEquals(targetEntity.getState(), actualState);
assertEquals(expectedState, actualState);
assertDsReadByStructuredQuery();
}
@Test
@DisplayName("in descending sort order")
void testQueryWithDescendingOrder() {
// Create entities.
int expectedRecordCount = UNORDERED_COLLEGE_NAMES.size();
List<CollegeEntity> entities = createAndStoreEntities(storage, UNORDERED_COLLEGE_NAMES);
EntityQuery<CollegeId> entityQuery = EntityQueries.from(emptyFilters(),
descendingBy(NAME),
emptyPagination(),
storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(expectedRecordCount, resultList.size());
// Check the entities were ordered.
List<CollegeId> expectedResults = reverse(sortedIds(entities, CollegeEntity::getName));
List<CollegeId> actualResults = recordIds(resultList);
assertEquals(expectedResults, actualResults);
assertDsReadByStructuredQuery();
}
@Test
@DisplayName("in an order specified by string field")
void testQueryWithOrderByString() {
testOrdering(NAME, CollegeEntity::getName);
}
@Test
@DisplayName("in order specified by double field")
void testQueryWithOrderByDouble() {
testOrdering(PASSING_GRADE, CollegeEntity::getPassingGrade);
}
@Test
@DisplayName("in order specified by timestamp field")
void testQueryWithOrderByTimestamp() {
testOrdering(ADMISSION_DEADLINE, entity -> entity.getAdmissionDeadline()
.getSeconds());
}
@Test
@DisplayName("in an order specified by integer")
void testQueryWithOrderByInt() {
testOrdering(STUDENT_COUNT, CollegeEntity::getStudentCount);
}
/**
* Uses the local {@link SpyStorageFactory}, so it cannot be moved to the test environment.
*/
private <T extends Comparable<T>> void
testOrdering(CollegeEntity.CollegeColumn column, Function<CollegeEntity, T> property) {
// Create entities.
int recordCount = UNORDERED_COLLEGE_NAMES.size();
List<CollegeEntity> entities = createAndStoreEntities(storage, UNORDERED_COLLEGE_NAMES);
// Compose Query.
EntityQuery<CollegeId> entityQuery = EntityQueries.from(emptyFilters(),
ascendingBy(column),
emptyPagination(),
storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(recordCount, resultList.size());
// Check the entities were ordered.
List<CollegeId> expectedResults = sortedIds(entities, property);
List<CollegeId> actualResults = recordIds(resultList);
assertEquals(expectedResults, actualResults);
assertDsReadByStructuredQuery();
}
@Test
@DisplayName("in an order specified by boolean")
void testQueryWithOrderByBoolean() {
// Create entities.
int recordCount = 20;
createAndStoreEntities(storage, recordCount);
// Compose Query.
EntityQuery<CollegeId> entityQuery = EntityQueries.from(emptyFilters(),
ascendingBy(STATE_SPONSORED),
emptyPagination(),
storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(recordCount, resultList.size());
// Check the entities were ordered.
List<Boolean> actualResults = getStateSponsoredValues(resultList);
assertSortedBooleans(actualResults);
assertDsReadByStructuredQuery();
}
@Test
@DisplayName("in specified order with nulls")
void testQueryWithOrderWithNulls() {
// Create entities.
int nullCount = 5;
int regularCount = 12;
int recordCount = regularCount + nullCount;
List<CollegeEntity> nullEntities =
createAndStoreEntitiesWithNullStudentCount(storage, nullCount);
List<CollegeEntity> regularEntities = createAndStoreEntities(storage, regularCount);
List<CollegeEntity> entities = combine(nullEntities, regularEntities);
// Compose Query.
EntityQuery<CollegeId> entityQuery = EntityQueries.from(emptyFilters(),
ascendingBy(STUDENT_COUNT),
emptyPagination(),
storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(recordCount, resultList.size());
// Check the entities were ordered.
List<Integer> expectedCounts = sortedValues(entities, CollegeEntity::getStudentCount);
List<Integer> actualCounts = nullableStudentCount(resultList);
assertEquals(expectedCounts, actualCounts);
assertDsReadByStructuredQuery();
}
@Test
@DisplayName("a specified number of entities")
void testQueryWithLimit() {
// Create entities.
int expectedRecordCount = 4;
List<CollegeEntity> entities = createAndStoreEntities(storage, UNORDERED_COLLEGE_NAMES);
// Compose Query.
EntityQuery<CollegeId> entityQuery = EntityQueries.from(emptyFilters(),
ascendingBy(NAME),
pagination(expectedRecordCount),
storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(expectedRecordCount, resultList.size());
// Check the entities were ordered.
List<CollegeId> sortedIds = sortedIds(entities, CollegeEntity::getName);
List<CollegeId> expectedResults = sortedIds.subList(0, expectedRecordCount);
List<CollegeId> actualResults = recordIds(resultList);
assertEquals(expectedResults, actualResults);
assertDsReadByStructuredQuery();
}
@Test
@DisplayName("with multiple Datastore reads")
void performsMultipleReads() {
createAndStoreEntities(storage, UNORDERED_COLLEGE_NAMES, 300, false);
createAndStoreEntities(storage, UNORDERED_COLLEGE_NAMES, 250, true);
createAndStoreEntities(storage, UNORDERED_COLLEGE_NAMES, 150, false);
List<CollegeEntity> entities = createAndStoreEntities(storage, UNORDERED_COLLEGE_NAMES,
150, true);
TargetFilters filters =
TargetFilters.newBuilder()
.addFilter(either(
lt(NAME.columnName(), UNORDERED_COLLEGE_NAMES.get(2)),
gt(NAME.columnName(), UNORDERED_COLLEGE_NAMES.get(2))
))
.addFilter(all(
eq(STATE_SPONSORED.columnName(), true),
eq(STUDENT_COUNT.columnName(), 150)
))
.build();
int recordCount = 5;
EntityQuery<CollegeId> query = EntityQueries.from(filters, ascendingBy(NAME),
pagination(recordCount), storage);
Iterator<EntityRecord> recordIterator = storage.readAll(query, emptyFieldMask());
List<EntityRecord> resultList = newArrayList(recordIterator);
assertEquals(recordCount, resultList.size());
// Check the entities were ordered.
entities.remove(2);
List<CollegeId> expectedResults = sortedIds(entities, CollegeEntity::getName);
List<CollegeId> actualResults = recordIds(resultList);
assertEquals(expectedResults, actualResults);
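// The `either` filter with two alternatives is expected to produce two Datastore reads.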
assertDsReadByStructuredQuery(2);
}
@Test
@DisplayName("returning single entity for multiple `EITHER` filter matches")
void testEitherFilterDuplicates() {
CollegeEntity entity = createAndStoreEntity(storage);
// Create `EITHER` column filter.
CompositeFilter eitherFilter = either(
eq(NAME.columnName(), entity.getName()),
eq(PASSING_GRADE.columnName(), entity.getPassingGrade())
);
TargetFilters entityFilters = newTargetFilters(emptyIdFilter(), eitherFilter);
// Compose Query.
EntityQuery<CollegeId> entityQuery =
EntityQueries.from(entityFilters, emptyOrderBy(), emptyPagination(), storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the entity is "found" only once.
List<EntityRecord> foundEntities = newArrayList(readResult);
assertEquals(1, foundEntities.size());
// Check it's the target entity.
EntityRecord record = foundEntities.get(0);
assertEquals(entity.getState(), unpack(record.getState()));
// Check there were actually 2 Datastore reads.
assertDsReadByStructuredQuery(2);
}
private void assertDsReadByStructuredQuery() {
assertDsReadByStructuredQuery(1);
}
private void assertDsReadByStructuredQuery(int invocationCount) {
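// Datastore must have been read via structured queries only, never by keys.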
DatastoreWrapper spy = storageFactory.getDatastore();
verify(spy, never()).read(anyIterable());
//noinspection unchecked OK for a generic class assignment in tests.
verify(spy, times(invocationCount)).read(any(StructuredQuery.class));
}
}
/**
* A {@link TestDatastoreStorageFactory} which spies on its {@link DatastoreWrapper}.
*
* This class is not moved to the
* {@linkplain io.spine.server.storage.datastore.given.DsRecordStorageTestEnv test environment}
* because it uses a package-private method of {@link DatastoreWrapper}.
*/
private static class SpyStorageFactory extends TestDatastoreStorageFactory {
private static DatastoreWrapper spyWrapper = null;
private static void injectWrapper(DatastoreWrapper wrapper) {
spyWrapper = spy(wrapper);
}
private SpyStorageFactory() {
super(spyWrapper.getDatastore());
}
@Override
protected DatastoreWrapper createDatastoreWrapper(Builder builder) {
return spyWrapper;
}
}
}
| datastore/src/test/java/io/spine/server/storage/datastore/DsRecordStorageTest.java | /*
* Copyright 2019, TeamDev. All rights reserved.
*
* Redistribution and use in source and/or binary forms, with or without
* modification, must retain the above copyright notice and the following
* disclaimer.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package io.spine.server.storage.datastore;
import com.google.cloud.datastore.Key;
import com.google.cloud.datastore.StructuredQuery;
import com.google.common.truth.IterableSubject;
import com.google.protobuf.Any;
import com.google.protobuf.FieldMask;
import com.google.protobuf.Message;
import com.google.protobuf.Timestamp;
import io.spine.client.CompositeFilter;
import io.spine.client.IdFilter;
import io.spine.client.TargetFilters;
import io.spine.core.Version;
import io.spine.core.Versions;
import io.spine.protobuf.AnyPacker;
import io.spine.server.entity.Entity;
import io.spine.server.entity.EntityRecord;
import io.spine.server.entity.LifecycleFlags;
import io.spine.server.entity.storage.EntityQueries;
import io.spine.server.entity.storage.EntityQuery;
import io.spine.server.entity.storage.EntityRecordWithColumns;
import io.spine.server.storage.RecordReadRequest;
import io.spine.server.storage.RecordStorage;
import io.spine.server.storage.RecordStorageTest;
import io.spine.server.storage.datastore.given.CollegeEntity;
import io.spine.server.storage.datastore.given.DsRecordStorageTestEnv;
import io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.EntityWithCustomColumnName;
import io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.TestEntity;
import io.spine.server.storage.datastore.given.TestConstCounterEntity;
import io.spine.server.storage.given.RecordStorageTestEnv.TestCounterEntity;
import io.spine.test.datastore.College;
import io.spine.test.datastore.CollegeId;
import io.spine.test.storage.Project;
import io.spine.test.storage.ProjectId;
import io.spine.test.storage.Task;
import io.spine.type.TypeUrl;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Lists.reverse;
import static com.google.common.truth.Truth.assertThat;
import static com.google.protobuf.util.Timestamps.toSeconds;
import static io.spine.base.Time.getCurrentTime;
import static io.spine.client.Filters.all;
import static io.spine.client.Filters.either;
import static io.spine.client.Filters.eq;
import static io.spine.client.Filters.gt;
import static io.spine.client.Filters.lt;
import static io.spine.json.Json.toCompactJson;
import static io.spine.protobuf.AnyPacker.pack;
import static io.spine.protobuf.AnyPacker.unpack;
import static io.spine.server.entity.FieldMasks.applyMask;
import static io.spine.server.entity.storage.EntityRecordWithColumns.create;
import static io.spine.server.storage.datastore.given.CollegeEntity.CollegeColumn.ADMISSION_DEADLINE;
import static io.spine.server.storage.datastore.given.CollegeEntity.CollegeColumn.CREATED;
import static io.spine.server.storage.datastore.given.CollegeEntity.CollegeColumn.NAME;
import static io.spine.server.storage.datastore.given.CollegeEntity.CollegeColumn.PASSING_GRADE;
import static io.spine.server.storage.datastore.given.CollegeEntity.CollegeColumn.STATE_SPONSORED;
import static io.spine.server.storage.datastore.given.CollegeEntity.CollegeColumn.STUDENT_COUNT;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.COLUMN_NAME_FOR_STORING;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.UNORDERED_COLLEGE_NAMES;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.ascendingBy;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.assertSortedBooleans;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.combine;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.createAndStoreEntities;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.createAndStoreEntitiesWithNullStudentCount;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.createAndStoreEntity;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.datastoreFactory;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.descendingBy;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.emptyFieldMask;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.emptyFilters;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.emptyIdFilter;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.emptyOrderBy;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.emptyPagination;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.getStateSponsoredValues;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.newCollegeId;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.newEntityRecord;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.newIdFilter;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.newTargetFilters;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.nullableStudentCount;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.pagination;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.recordIds;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.sortedIds;
import static io.spine.server.storage.datastore.given.DsRecordStorageTestEnv.sortedValues;
import static java.util.Collections.singletonList;
import static java.util.stream.Collectors.toList;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyIterable;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
@DisplayName("DsRecordStorage should")
class DsRecordStorageTest extends RecordStorageTest<DsRecordStorage<ProjectId>> {
private final TestDatastoreStorageFactory datastoreFactory = datastoreFactory();
@SuppressWarnings("unchecked") // OK for tests.
@Override
protected DsRecordStorage<ProjectId> newStorage(Class<? extends Entity> entityClass) {
Class<? extends Entity<ProjectId, ?>> cls =
(Class<? extends Entity<ProjectId, ?>>) entityClass;
return (DsRecordStorage<ProjectId>) datastoreFactory.createRecordStorage(cls);
}
@Override
protected Class<? extends TestCounterEntity> getTestEntityClass() {
return TestEntity.class;
}
@Override
protected Message newState(ProjectId projectId) {
Project project = Project
.newBuilder()
.setId(projectId)
.setName("Some test name")
.addTask(Task.getDefaultInstance())
.setStatus(Project.Status.CREATED)
.build();
return project;
}
private EntityRecordWithColumns newRecordWithColumns(RecordStorage<ProjectId> storage) {
EntityRecord record = newStorageRecord();
Entity<ProjectId, Project> entity = TestConstCounterEntity.create(newId());
EntityRecordWithColumns recordWithColumns = create(record, entity, storage);
return recordWithColumns;
}
@BeforeEach
void setUp() {
datastoreFactory.setUp();
}
@AfterEach
void tearDown() {
datastoreFactory.tearDown();
}
@Test
@DisplayName("provide access to DatastoreWrapper for extensibility")
void testAccessDatastoreWrapper() {
DsRecordStorage<ProjectId> storage = getStorage();
DatastoreWrapper datastore = storage.getDatastore();
assertNotNull(datastore);
}
@Test
@DisplayName("provide access to TypeUrl for extensibility")
void testAccessTypeUrl() {
DsRecordStorage<ProjectId> storage = getStorage();
TypeUrl typeUrl = storage.getTypeUrl();
assertNotNull(typeUrl);
// According to the `TestConstCounterEntity` declaration.
assertEquals(TypeUrl.of(Project.class), typeUrl);
}
@SuppressWarnings("OverlyLongMethod")
// A complicated test case verifying correct Datastore behavior at the low level of
// DatastoreWrapper and Datastore Entity.
// Additionally checks the standard predefined Datastore column types.
@Test
@DisplayName("persist entity columns beside the corresponding record")
void testPersistColumns() {
String counter = "counter";
String bigCounter = "bigCounter";
String counterEven = "counterEven";
String counterVersion = "counterVersion";
@SuppressWarnings("DuplicateStringLiteralInspection") // common column name
String creationTime = "creationTime";
String counterState = "counterState";
String version = "version";
String archived = "archived";
String deleted = "deleted";
ProjectId id = newId();
Project state = (Project) newState(id);
Version versionValue = Versions.newVersion(5, getCurrentTime());
TestConstCounterEntity entity = TestConstCounterEntity.create(id, state);
EntityRecord record = EntityRecord.newBuilder()
.setState(pack(state))
.setEntityId(pack(id))
.setVersion(versionValue)
.build();
DsRecordStorage<ProjectId> storage = newStorage(TestConstCounterEntity.class);
EntityRecordWithColumns recordWithColumns = create(record, entity, storage);
Collection<String> columns = recordWithColumns.getColumnNames();
assertNotNull(columns);
IterableSubject assertColumns = assertThat(columns);
// Custom Columns
assertColumns.contains(counter);
assertColumns.contains(bigCounter);
assertColumns.contains(counterEven);
assertColumns.contains(counterVersion);
assertColumns.contains(creationTime);
assertColumns.contains(counterState);
// Columns defined in superclasses
assertColumns.contains(version);
assertColumns.contains(archived);
assertColumns.contains(deleted);
// High level write operation
storage.write(id, recordWithColumns);
// Read Datastore Entity
DatastoreWrapper datastore = storage.getDatastore();
Key key = datastore.keyFor(
Kind.of(state),
RecordId.ofEntityId(id));
com.google.cloud.datastore.Entity datastoreEntity = datastore.read(key);
// Check entity record
TypeUrl recordType = TypeUrl.from(EntityRecord.getDescriptor());
EntityRecord readRecord = Entities.toMessage(datastoreEntity, recordType);
assertEquals(record, readRecord);
// Check custom Columns
assertEquals(entity.getCounter(), datastoreEntity.getLong(counter));
assertEquals(entity.getBigCounter(), datastoreEntity.getLong(bigCounter));
assertEquals(entity.getCounterVersion()
.getNumber(), datastoreEntity.getLong(counterVersion));
com.google.cloud.Timestamp actualCreationTime =
datastoreEntity.getTimestamp(creationTime);
assertEquals(toSeconds(entity.getCreationTime()), actualCreationTime.getSeconds());
assertEquals(entity.getCreationTime()
.getNanos(), actualCreationTime.getNanos());
assertEquals(entity.isCounterEven(), datastoreEntity.getBoolean(counterEven));
assertEquals(toCompactJson(entity.getCounterState()),
datastoreEntity.getString(counterState));
// Check standard Columns
assertEquals(entity.getVersion()
.getNumber(), datastoreEntity.getLong(version));
assertEquals(entity.isArchived(), datastoreEntity.getBoolean(archived));
assertEquals(entity.isDeleted(), datastoreEntity.getBoolean(deleted));
}
@Test
@DisplayName("pass big data speed test")
void testBigData() {
// The default bulk size is 500 records, i.e. the maximum number of records that can be
// written within one write operation.
long maxReadTime = 1000;
long maxWriteTime = 9500;
DsRecordStorage<ProjectId> storage = newStorage(TestConstCounterEntity.class);
BigDataTester.<ProjectId>newBuilder()
.setEntryFactory(new BigDataTester.EntryFactory<ProjectId>() {
@Override
public ProjectId newId() {
return DsRecordStorageTest.this.newId();
}
@Override
public EntityRecordWithColumns newRecord() {
return DsRecordStorageTest.this.newRecordWithColumns(storage);
}
})
.setReadLimit(maxReadTime)
.setWriteLimit(maxWriteTime)
.build()
.testBigDataOperations(storage);
}
@Test
@DisplayName("write and read records with lifecycle flags by ID")
void testLifecycleFlags() {
ProjectId id = newId();
LifecycleFlags lifecycle = LifecycleFlags
.newBuilder()
.setArchived(true)
.build();
EntityRecord record = EntityRecord
.newBuilder()
.setState(pack(newState(id)))
.setLifecycleFlags(lifecycle)
.setEntityId(pack(id))
.build();
TestConstCounterEntity entity = TestConstCounterEntity.create(id);
entity.injectLifecycle(lifecycle);
RecordStorage<ProjectId> storage = newStorage(TestConstCounterEntity.class);
EntityRecordWithColumns recordWithColumns = create(record, entity, storage);
storage.write(id, recordWithColumns);
RecordReadRequest<ProjectId> request = new RecordReadRequest<>(id);
Optional<EntityRecord> restoredRecordOptional = storage.read(request);
assertTrue(restoredRecordOptional.isPresent());
// Includes Lifecycle flags comparison
EntityRecord restoredRecord = restoredRecordOptional.get();
assertEquals(record, restoredRecord);
}
@Test
@DisplayName("convert entity record to entity using column name for storing")
void testUseColumnStoreName() {
DsRecordStorage<ProjectId> storage = newStorage(EntityWithCustomColumnName.class);
ProjectId id = newId();
EntityRecord record = newEntityRecord(id, newState(id));
Entity entity = new EntityWithCustomColumnName(id);
EntityRecordWithColumns entityRecordWithColumns = create(record, entity, storage);
com.google.cloud.datastore.Entity datastoreEntity =
storage.entityRecordToEntity(id, entityRecordWithColumns);
Set<String> propertiesName = datastoreEntity.getNames();
assertTrue(propertiesName.contains(COLUMN_NAME_FOR_STORING));
}
@Nested
@DisplayName("lookup Datastore records by IDs")
class LookupByIds {
private DatastoreStorageFactory storageFactory;
private RecordStorage<CollegeId> storage;
@BeforeEach
void setUp() {
SpyStorageFactory.injectWrapper(datastoreFactory().getDatastore());
storageFactory = new SpyStorageFactory();
storage = storageFactory.createRecordStorage(CollegeEntity.class);
}
@Test
@DisplayName("returning proper entity")
void testQueryByIDs() {
// Create 10 entities and pick one for tests.
int recordCount = 10;
int targetEntityIndex = 7;
List<CollegeEntity> entities = createAndStoreEntities(storage, recordCount);
CollegeEntity targetEntity = entities.get(targetEntityIndex);
// Create ID filter.
Any targetId = pack(targetEntity.getId());
IdFilter idFilter = newIdFilter(targetId);
// Create column filter.
Timestamp targetColumnValue = targetEntity.getCreationTime();
CompositeFilter columnFilter = all(eq(CREATED.columnName(), targetColumnValue));
// Compose Query filters.
TargetFilters entityFilters = newTargetFilters(idFilter, columnFilter);
// Compose Query.
EntityQuery<CollegeId> entityQuery =
EntityQueries.from(entityFilters, emptyOrderBy(), emptyPagination(), storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(1, resultList.size());
// Check the record state.
EntityRecord record = resultList.get(0);
assertEquals(targetEntity.getState(), unpack(record.getState()));
assertDsReadByKeys();
}
@Test
@DisplayName("in descending sort order")
void testQueryByIDsWithDescendingOrder() {
// Create entities.
int recordCount = UNORDERED_COLLEGE_NAMES.size();
List<CollegeEntity> entities = createAndStoreEntities(storage, UNORDERED_COLLEGE_NAMES);
// Create ID filter.
List<Any> targetIds = idsAsAny(entities);
IdFilter idFilter = newIdFilter(targetIds);
// Compose Query filters.
TargetFilters entityFilters = newTargetFilters(idFilter);
// Compose Query.
EntityQuery<CollegeId> entityQuery = EntityQueries.from(entityFilters,
descendingBy(NAME),
emptyPagination(),
storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(recordCount, resultList.size());
// Check the entities were ordered.
List<CollegeId> expectedResults = reverse(sortedIds(entities, CollegeEntity::getName));
List<CollegeId> actualResults = recordIds(resultList);
assertEquals(expectedResults, actualResults);
assertDsReadByKeys();
}
@Test
@DisplayName("in an order specified by string field")
void testQueryByIDsWithOrderByString() {
testOrdering(NAME, CollegeEntity::getName);
}
@Test
@DisplayName("in order specified by double field")
void testQueryByIDsWithOrderByDouble() {
testOrdering(PASSING_GRADE, CollegeEntity::getPassingGrade);
}
@Test
@DisplayName("in order specified by timestamp field")
void testQueryByIDsWithOrderByTimestamp() {
testOrdering(ADMISSION_DEADLINE, entity -> entity.getAdmissionDeadline()
.getSeconds());
}
@Test
@DisplayName("in an order specified by integer")
void testQueryByIDsWithOrderByInt() {
testOrdering(STUDENT_COUNT, CollegeEntity::getStudentCount);
}
/**
* Uses the local {@link SpyStorageFactory}, so it cannot be moved to the test environment.
*/
private <T extends Comparable<T>> void
testOrdering(CollegeEntity.CollegeColumn column, Function<CollegeEntity, T> property) {
// Create entities.
int expectedRecordCount = UNORDERED_COLLEGE_NAMES.size() - 2;
List<CollegeEntity> entities =
createAndStoreEntities(storage, UNORDERED_COLLEGE_NAMES)
.subList(0, expectedRecordCount);
// Create ID filter.
List<Any> targetIds = idsAsAny(entities);
IdFilter idFilter = newIdFilter(targetIds);
// Compose Query filters.
TargetFilters entityFilters = newTargetFilters(idFilter);
// Compose Query.
EntityQuery<CollegeId> entityQuery = EntityQueries.from(entityFilters,
ascendingBy(column),
emptyPagination(),
storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(expectedRecordCount, resultList.size());
// Check the entities were ordered.
List<CollegeId> expectedResults = sortedIds(entities, property);
List<CollegeId> actualResults = recordIds(resultList);
assertEquals(expectedResults, actualResults);
assertDsReadByKeys();
}
private List<Any> idsAsAny(List<CollegeEntity> entities) {
return entities.stream()
.map(Entity::getId)
.map(AnyPacker::pack)
.collect(toList());
}
@Test
@DisplayName("in an order specified by boolean")
void testQueryByIDsWithOrderByBoolean() {
// Create entities.
int recordCount = 20;
List<CollegeEntity> entities = createAndStoreEntities(storage, recordCount);
// Create ID filter.
List<Any> targetIds = idsAsAny(entities);
IdFilter idFilter = newIdFilter(targetIds);
// Compose Query filters.
TargetFilters entityFilters = newTargetFilters(idFilter);
// Compose Query.
EntityQuery<CollegeId> entityQuery = EntityQueries.from(entityFilters,
ascendingBy(STATE_SPONSORED),
emptyPagination(),
storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(recordCount, resultList.size());
// Check the entities were ordered.
List<Boolean> actualResults = getStateSponsoredValues(resultList);
assertSortedBooleans(actualResults);
assertDsReadByKeys();
}
@Test
@DisplayName("in specified order with nulls")
void testQueryByIDsWithOrderWithNulls() {
// Create entities.
int nullCount = 11;
int regularCount = 37;
int recordCount = regularCount + nullCount;
List<CollegeEntity> nullEntities =
createAndStoreEntitiesWithNullStudentCount(storage, nullCount);
List<CollegeEntity> regularEntities = createAndStoreEntities(storage, regularCount);
List<CollegeEntity> entities = combine(nullEntities, regularEntities);
// Create ID filter.
List<Any> targetIds = idsAsAny(entities);
IdFilter idFilter = newIdFilter(targetIds);
// Compose Query filters.
TargetFilters entityFilters = newTargetFilters(idFilter);
// Compose Query.
EntityQuery<CollegeId> entityQuery = EntityQueries.from(entityFilters,
ascendingBy(STUDENT_COUNT),
emptyPagination(),
storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(recordCount, resultList.size());
// Check the entities were ordered.
List<Integer> expectedCounts = sortedValues(entities, CollegeEntity::getStudentCount);
List<Integer> actualCounts = nullableStudentCount(resultList);
assertEquals(expectedCounts, actualCounts);
// Check Datastore reads are performed by keys but not using a structured query.
DatastoreWrapper spy = storageFactory.getDatastore();
verify(spy).read(anyIterable());
//noinspection unchecked OK for a generic class assignment in tests.
verify(spy, never()).read(any(StructuredQuery.class));
}
@Test
@DisplayName("in specified order with missing entities")
void testQueryByIDsWithOrderWithMissingEntities() {
// Create entities.
int recordCount = 12;
List<CollegeEntity> entities = createAndStoreEntities(storage, recordCount);
// Create ID filter.
List<Any> targetIds = idsAsAny(entities);
targetIds.add(2, pack(newCollegeId()));
targetIds.add(5, pack(newCollegeId()));
targetIds.add(7, pack(newCollegeId()));
IdFilter idFilter = newIdFilter(targetIds);
// Compose Query filters.
TargetFilters entityFilters = newTargetFilters(idFilter);
// Compose Query.
EntityQuery<CollegeId> entityQuery = EntityQueries.from(entityFilters,
ascendingBy(STUDENT_COUNT),
emptyPagination(),
storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(recordCount, resultList.size());
// Check the entities were ordered.
List<CollegeId> expectedResults = sortedIds(entities, CollegeEntity::getStudentCount);
List<CollegeId> actualResults = recordIds(resultList);
assertEquals(expectedResults, actualResults);
assertDsReadByKeys();
}
@Test
@DisplayName("a specified number of entities")
void testQueryByIDsWithLimit() {
// Create entities.
int expectedRecordCount = 4;
List<CollegeEntity> entities = createAndStoreEntities(storage, UNORDERED_COLLEGE_NAMES);
// Create ID filter.
List<Any> targetIds = idsAsAny(entities);
IdFilter idFilter = newIdFilter(targetIds);
// Compose Query filters.
TargetFilters entityFilters = newTargetFilters(idFilter);
// Compose Query.
EntityQuery<CollegeId> entityQuery = EntityQueries.from(entityFilters,
ascendingBy(NAME),
pagination(expectedRecordCount),
storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(expectedRecordCount, resultList.size());
// Check the entities were ordered.
List<CollegeId> sortedIds = sortedIds(entities, CollegeEntity::getName);
List<CollegeId> expectedResults = sortedIds.subList(0, expectedRecordCount);
List<CollegeId> actualResults = recordIds(resultList);
assertEquals(expectedResults, actualResults);
assertDsReadByKeys();
}
@Test
@DisplayName("for entities without lifecycle")
void testQueryEntityWithoutLifecycleById() {
DsRecordStorage<ProjectId> storage = newStorage(EntityWithCustomColumnName.class);
ProjectId id = newId();
EntityRecord record = newEntityRecord(id, newState(id));
EntityWithCustomColumnName entity = new EntityWithCustomColumnName(id);
storage.writeRecord(entity.getId(), create(record, entity, storage));
// Create ID filter.
List<Any> targetIds = singletonList(pack(entity.getId()));
IdFilter idFilter = newIdFilter(targetIds);
// Compose Query filters.
TargetFilters entityFilters = newTargetFilters(idFilter);
// Compose Query.
EntityQuery<ProjectId> entityQuery = EntityQueries.from(entityFilters,
emptyOrderBy(),
emptyPagination(),
storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
assertEquals(record, readResult.next());
assertFalse(readResult.hasNext());
}
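/**
 * Asserts that the Datastore was read by entity keys and never via a structured query.
 */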
private void assertDsReadByKeys() {
DatastoreWrapper spy = storageFactory.getDatastore();
verify(spy).read(anyIterable());
//noinspection unchecked OK for a generic class assignment in tests.
verify(spy, never()).read(any(StructuredQuery.class));
}
}
/**
* Overrides and disables test from parent: {@link RecordStorageTest#rewritingExisting()}.
*/
@Test
@DisplayName("given bulk of records, write them re-writing existing ones")
void rewritingExisting() {
}
@Nested
@DisplayName("lookup records in Datastore by columns")
class LookupByQueries {
private DatastoreStorageFactory storageFactory;
private RecordStorage<CollegeId> storage;
@BeforeEach
void setUp() {
SpyStorageFactory.injectWrapper(datastoreFactory().getDatastore());
storageFactory = new SpyStorageFactory();
storage = storageFactory.createRecordStorage(CollegeEntity.class);
}
@Test
@DisplayName("returning proper entity for single column")
void testQueryByColumn() {
// Create 10 entities and pick one for tests.
int recordCount = 10;
int targetEntityIndex = 7;
List<CollegeEntity> entities = createAndStoreEntities(storage, recordCount);
CollegeEntity targetEntity = entities.get(targetEntityIndex);
// Create column filter.
String targetColumnValue = targetEntity.getName();
CompositeFilter columnFilter = all(eq(NAME.columnName(), targetColumnValue));
// Compose Query filters.
TargetFilters entityFilters = newTargetFilters(emptyIdFilter(),
columnFilter);
// Compose Query.
EntityQuery<CollegeId> entityQuery =
EntityQueries.from(entityFilters, emptyOrderBy(), emptyPagination(), storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(1, resultList.size());
// Check the record state.
EntityRecord record = resultList.get(0);
assertEquals(targetEntity.getState(), unpack(record.getState()));
assertDsReadByStructuredQuery();
}
@Test
@DisplayName("returning proper entity for multiple columns")
void testQueryByMultipleColumns() {
// Create 10 entities and pick one for tests.
int recordCount = 10;
int targetEntityIndex = 7;
List<CollegeEntity> entities = createAndStoreEntities(storage, recordCount);
CollegeEntity targetEntity = entities.get(targetEntityIndex);
// Create column filter.
CompositeFilter columnFilter = all(
eq(NAME.columnName(), targetEntity.getName()),
eq(CREATED.columnName(), targetEntity.getCreationTime())
);
TargetFilters entityFilters = newTargetFilters(emptyIdFilter(),
columnFilter);
// Compose Query.
EntityQuery<CollegeId> entityQuery =
EntityQueries.from(entityFilters, emptyOrderBy(), emptyPagination(), storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(1, resultList.size());
// Check the record state.
EntityRecord record = resultList.get(0);
assertEquals(targetEntity.getState(), unpack(record.getState()));
assertDsReadByStructuredQuery();
}
@Test
@DisplayName("with masked state")
void testFieldMaskApplied() {
// Create 10 entities and pick one for tests.
int recordCount = 10;
int targetEntityIndex = 7;
List<CollegeEntity> entities = createAndStoreEntities(storage, recordCount);
CollegeEntity targetEntity = entities.get(targetEntityIndex);
// Create column filter.
CompositeFilter columnFilter = all(
eq(NAME.columnName(), targetEntity.getName()),
eq(CREATED.columnName(), targetEntity.getCreationTime())
);
TargetFilters entityFilters = newTargetFilters(emptyIdFilter(),
columnFilter);
// Compose Query.
EntityQuery<CollegeId> query =
EntityQueries.from(entityFilters, emptyOrderBy(), emptyPagination(), storage);
// Execute Query.
FieldMask mask = DsRecordStorageTestEnv.newFieldMask("id", "name");
Iterator<EntityRecord> readResult = storage.readAll(query, mask);
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(1, resultList.size());
// Check the record state.
EntityRecord record = resultList.get(0);
College expectedState = applyMask(mask, targetEntity.getState());
College actualState = (College) unpack(record.getState());
assertNotEquals(targetEntity.getState(), actualState);
assertEquals(expectedState, actualState);
assertDsReadByStructuredQuery();
}
@Test
@DisplayName("in descending sort order")
void testQueryWithDescendingOrder() {
// Create entities.
int expectedRecordCount = UNORDERED_COLLEGE_NAMES.size();
List<CollegeEntity> entities = createAndStoreEntities(storage, UNORDERED_COLLEGE_NAMES);
EntityQuery<CollegeId> entityQuery = EntityQueries.from(emptyFilters(),
descendingBy(NAME),
emptyPagination(),
storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(expectedRecordCount, resultList.size());
// Check the entities were ordered.
List<CollegeId> expectedResults = reverse(sortedIds(entities, CollegeEntity::getName));
List<CollegeId> actualResults = recordIds(resultList);
assertEquals(expectedResults, actualResults);
assertDsReadByStructuredQuery();
}
@Test
@DisplayName("in an order specified by string field")
void testQueryWithOrderByString() {
testOrdering(NAME, CollegeEntity::getName);
}
@Test
@DisplayName("in order specified by double field")
void testQueryWithOrderByDouble() {
testOrdering(PASSING_GRADE, CollegeEntity::getPassingGrade);
}
@Test
@DisplayName("in order specified by timestamp field")
void testQueryWithOrderByTimestamp() {
testOrdering(ADMISSION_DEADLINE, entity -> entity.getAdmissionDeadline()
.getSeconds());
}
@Test
@DisplayName("in an order specified by integer")
void testQueryWithOrderByInt() {
testOrdering(STUDENT_COUNT, CollegeEntity::getStudentCount);
}
/**
* Uses local {@link SpyStorageFactory} so cannot be moved to test environment.
*/
private <T extends Comparable<T>> void
testOrdering(CollegeEntity.CollegeColumn column, Function<CollegeEntity, T> property) {
// Create entities.
int recordCount = UNORDERED_COLLEGE_NAMES.size();
List<CollegeEntity> entities = createAndStoreEntities(storage, UNORDERED_COLLEGE_NAMES);
// Compose Query.
EntityQuery<CollegeId> entityQuery = EntityQueries.from(emptyFilters(),
ascendingBy(column),
emptyPagination(),
storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(recordCount, resultList.size());
// Check the entities were ordered.
List<CollegeId> expectedResults = sortedIds(entities, property);
List<CollegeId> actualResults = recordIds(resultList);
assertEquals(expectedResults, actualResults);
assertDsReadByStructuredQuery();
}
@Test
@DisplayName("in an order specified by boolean")
void testQueryWithOrderByBoolean() {
// Create entities.
int recordCount = 20;
createAndStoreEntities(storage, recordCount);
// Compose Query.
EntityQuery<CollegeId> entityQuery = EntityQueries.from(emptyFilters(),
ascendingBy(STATE_SPONSORED),
emptyPagination(),
storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(recordCount, resultList.size());
// Check the entities were ordered.
List<Boolean> actualResults = getStateSponsoredValues(resultList);
assertSortedBooleans(actualResults);
assertDsReadByStructuredQuery();
}
@Test
@DisplayName("in specified order with nulls")
void testQueryWithOrderWithNulls() {
// Create entities.
int nullCount = 5;
int regularCount = 12;
int recordCount = regularCount + nullCount;
List<CollegeEntity> nullEntities =
createAndStoreEntitiesWithNullStudentCount(storage, nullCount);
List<CollegeEntity> regularEntities = createAndStoreEntities(storage, regularCount);
List<CollegeEntity> entities = combine(nullEntities, regularEntities);
// Compose Query.
EntityQuery<CollegeId> entityQuery = EntityQueries.from(emptyFilters(),
ascendingBy(STUDENT_COUNT),
emptyPagination(),
storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(recordCount, resultList.size());
// Check the entities were ordered.
List<Integer> expectedCounts = sortedValues(entities, CollegeEntity::getStudentCount);
List<Integer> actualCounts = nullableStudentCount(resultList);
assertEquals(expectedCounts, actualCounts);
assertDsReadByStructuredQuery();
}
@Test
@DisplayName("a specified number of entities")
void testQueryWithLimit() {
// Create entities.
int expectedRecordCount = 4;
List<CollegeEntity> entities = createAndStoreEntities(storage, UNORDERED_COLLEGE_NAMES);
// Compose Query.
EntityQuery<CollegeId> entityQuery = EntityQueries.from(emptyFilters(),
ascendingBy(NAME),
pagination(expectedRecordCount),
storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> resultList = newArrayList(readResult);
assertEquals(expectedRecordCount, resultList.size());
// Check the entities were ordered.
List<CollegeId> sortedIds = sortedIds(entities, CollegeEntity::getName);
List<CollegeId> expectedResults = sortedIds.subList(0, expectedRecordCount);
List<CollegeId> actualResults = recordIds(resultList);
assertEquals(expectedResults, actualResults);
assertDsReadByStructuredQuery();
}
@Test
@DisplayName("with multiple Datastore reads")
void performsMultipleReads() {
createAndStoreEntities(storage, UNORDERED_COLLEGE_NAMES, 300, false);
createAndStoreEntities(storage, UNORDERED_COLLEGE_NAMES, 250, true);
createAndStoreEntities(storage, UNORDERED_COLLEGE_NAMES, 150, false);
List<CollegeEntity> entities = createAndStoreEntities(storage, UNORDERED_COLLEGE_NAMES,
150, true);
TargetFilters filters =
TargetFilters.newBuilder()
.addFilter(either(
lt(NAME.columnName(), UNORDERED_COLLEGE_NAMES.get(2)),
gt(NAME.columnName(), UNORDERED_COLLEGE_NAMES.get(2))
))
.addFilter(all(
eq(STATE_SPONSORED.columnName(), true),
eq(STUDENT_COUNT.columnName(), 150)
))
.build();
int recordCount = 5;
EntityQuery<CollegeId> query = EntityQueries.from(filters, ascendingBy(NAME),
pagination(recordCount), storage);
Iterator<EntityRecord> recordIterator = storage.readAll(query, emptyFieldMask());
List<EntityRecord> resultList = newArrayList(recordIterator);
assertEquals(recordCount, resultList.size());
// Check the entities were ordered.
entities.remove(2);
List<CollegeId> expectedResults = sortedIds(entities, CollegeEntity::getName);
List<CollegeId> actualResults = recordIds(resultList);
assertEquals(expectedResults, actualResults);
assertDsReadByStructuredQuery(2);
}
@Test
@DisplayName("returning single entity for multiple `EITHER` filter matches")
void testEitherFilterDuplicates() {
CollegeEntity entity = createAndStoreEntity(storage);
// Create `EITHER` column filter.
CompositeFilter eitherFilter = either(
eq(NAME.columnName(), entity.getName()),
eq(PASSING_GRADE.columnName(), entity.getPassingGrade())
);
TargetFilters entityFilters = newTargetFilters(emptyIdFilter(), eitherFilter);
// Compose Query.
EntityQuery<CollegeId> entityQuery =
EntityQueries.from(entityFilters, emptyOrderBy(), emptyPagination(), storage);
// Execute Query.
Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
// Check the query results.
List<EntityRecord> foundEntities = newArrayList(readResult);
assertEquals(1, foundEntities.size());
// Check the record state.
EntityRecord record = foundEntities.get(0);
assertEquals(entity.getState(), unpack(record.getState()));
assertDsReadByStructuredQuery(2);
}
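/**
 * Asserts that the Datastore was read via a structured query exactly once and never by keys.
 */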
private void assertDsReadByStructuredQuery() {
assertDsReadByStructuredQuery(1);
}
private void assertDsReadByStructuredQuery(int invocationCount) {
DatastoreWrapper spy = storageFactory.getDatastore();
verify(spy, never()).read(anyIterable());
//noinspection unchecked OK for a generic class assignment in tests.
verify(spy, times(invocationCount)).read(any(StructuredQuery.class));
}
}
/**
* A {@link TestDatastoreStorageFactory} which spies on its {@link DatastoreWrapper}.
*
* This class is not moved to the
* {@linkplain io.spine.server.storage.datastore.given.DsRecordStorageTestEnv test environment}
* because it uses a package-private method of {@link DatastoreWrapper}.
*/
private static class SpyStorageFactory extends TestDatastoreStorageFactory {
private static DatastoreWrapper spyWrapper = null;
private static void injectWrapper(DatastoreWrapper wrapper) {
spyWrapper = spy(wrapper);
}
private SpyStorageFactory() {
super(spyWrapper.getDatastore());
}
@Override
protected DatastoreWrapper createDatastoreWrapper(Builder builder) {
return spyWrapper;
}
}
}
| Clarify duplication test comments
| datastore/src/test/java/io/spine/server/storage/datastore/DsRecordStorageTest.java | Clarify duplication test comments | <ide><path>atastore/src/test/java/io/spine/server/storage/datastore/DsRecordStorageTest.java
<ide> // Execute Query.
<ide> Iterator<EntityRecord> readResult = storage.readAll(entityQuery, emptyFieldMask());
<ide>
<del> // Check the query results.
<add> // Check the entity is "found" only once.
<ide> List<EntityRecord> foundEntities = newArrayList(readResult);
<ide> assertEquals(1, foundEntities.size());
<ide>
<del> // Check the record state.
<add> // Check it's the target entity.
<ide> EntityRecord record = foundEntities.get(0);
<ide> assertEquals(entity.getState(), unpack(record.getState()));
<ide>
<add> // Check there were actually 2 Datastore reads.
<ide> assertDsReadByStructuredQuery(2);
<ide> }
<ide> |
|
Java | apache-2.0 | 05d3d42e29c606fb1ca6f5e263e20cd85d9ba468 | 0 | rndsolutions/hawkcd,rndsolutions/hawkcd,rndsolutions/hawkcd,rndsolutions/hawkcd,rndsolutions/hawkcd | package net.hawkengine.model;
import java.time.LocalDateTime;
import java.util.HashSet;
import java.util.Set;
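/**
 * Represents an agent together with its host details, assigned resources, environment,
 * connection and assignment status, and the time it last reported.
 */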
public class Agent extends DbEntry {
private String name;
private String hostName;
private String ipAddress;
private String rootPath;
private String operatingSystem;
private Set<String> resources;
private Environment environment;
private boolean isRunning;
private boolean isEnabled;
private boolean isConnected;
private boolean isAssigned;
private LocalDateTime lastReportedTime;
public Agent() {
this.setResources(new HashSet<>());
}
public String getName() {
return this.name;
}
public void setName(String name) {
this.name = name;
}
public String getHostName() {
return this.hostName;
}
public void setHostName(String hostName) {
this.hostName = hostName;
}
public String getIpAddress() {
return this.ipAddress;
}
public void setIpAddress(String ipAddress) {
this.ipAddress = ipAddress;
}
public String getRootPath() {
return this.rootPath;
}
public void setRootPath(String rootPath) {
this.rootPath = rootPath;
}
public String getOperatingSystem() {
return this.operatingSystem;
}
public void setOperatingSystem(String operatingSystem) {
this.operatingSystem = operatingSystem;
}
public Set<String> getResources() {
return this.resources;
}
public void setResources(Set<String> resources) {
this.resources = resources;
}
public Environment getEnvironment() {
return this.environment;
}
public void setEnvironment(Environment environment) {
this.environment = environment;
}
public boolean isRunning() {
return this.isRunning;
}
public void setRunning(boolean running) {
this.isRunning = running;
}
public boolean isEnabled() {
return this.isEnabled;
}
public void setEnabled(boolean enabled) {
this.isEnabled = enabled;
}
public boolean isConnected() {
return this.isConnected;
}
public void setConnected(boolean connected) {
this.isConnected = connected;
}
public boolean isAssigned() {
return isAssigned;
}
public void setAssigned(boolean assigned) {
isAssigned = assigned;
}
public LocalDateTime getLastReportedTime() {
return this.lastReportedTime;
}
public void setLastReportedTime(LocalDateTime lastReportedTime) {
this.lastReportedTime = lastReportedTime;
}
} | src/hawkengine/src/main/java/net/hawkengine/model/Agent.java | package net.hawkengine.model;
import java.time.LocalDateTime;
import java.util.HashSet;
import java.util.Set;
public class Agent extends DbEntry {
private String name;
private String hostName;
private String ipAddress;
private String rootPath;
private Object operatingSystem;
private Set<String> resources;
private Environment environment;
private boolean isRunning;
private boolean isEnabled;
private boolean isConnected;
private LocalDateTime lastReported;
public Agent() {
this.setResources(new HashSet<>());
}
public String getName() {
return this.name;
}
public void setName(String name) {
this.name = name;
}
public String getHostName() {
return this.hostName;
}
public void setHostName(String hostName) {
this.hostName = hostName;
}
public String getIpAddress() {
return this.ipAddress;
}
public void setIpAddress(String ipAddress) {
this.ipAddress = ipAddress;
}
public String getRootPath() {
return this.rootPath;
}
public void setRootPath(String rootPath) {
this.rootPath = rootPath;
}
public Object getOperatingSystem() {
return this.operatingSystem;
}
public void setOperatingSystem(Object operatingSystem) {
this.operatingSystem = operatingSystem;
}
public Set<String> getResources() {
return this.resources;
}
public void setResources(Set<String> resources) {
this.resources = resources;
}
public Environment getEnvironment() {
return this.environment;
}
public void setEnvironment(Environment environment) {
this.environment = environment;
}
public boolean isRunning() {
return this.isRunning;
}
public void setRunning(boolean running) {
this.isRunning = running;
}
public boolean isEnabled() {
return this.isEnabled;
}
public void setEnabled(boolean enabled) {
this.isEnabled = enabled;
}
public boolean isConnected() {
return this.isConnected;
}
public void setConnected(boolean connected) {
this.isConnected = connected;
}
public LocalDateTime getLastReported() {
return this.lastReported;
}
public void setLastReported(LocalDateTime lastReported) {
this.lastReported = lastReported;
}
} | Added additional field
| src/hawkengine/src/main/java/net/hawkengine/model/Agent.java | Added additional field | <ide><path>rc/hawkengine/src/main/java/net/hawkengine/model/Agent.java
<ide> private String hostName;
<ide> private String ipAddress;
<ide> private String rootPath;
<del> private Object operatingSystem;
<add> private String operatingSystem;
<ide> private Set<String> resources;
<ide> private Environment environment;
<ide> private boolean isRunning;
<ide> private boolean isEnabled;
<ide> private boolean isConnected;
<del> private LocalDateTime lastReported;
<add> private boolean isAssigned;
<add> private LocalDateTime lastReportedTime;
<ide>
<ide> public Agent() {
<ide> this.setResources(new HashSet<>());
<ide> this.rootPath = rootPath;
<ide> }
<ide>
<del> public Object getOperatingSystem() {
<add> public String getOperatingSystem() {
<ide> return this.operatingSystem;
<ide> }
<ide>
<del> public void setOperatingSystem(Object operatingSystem) {
<add> public void setOperatingSystem(String operatingSystem) {
<ide> this.operatingSystem = operatingSystem;
<ide> }
<ide>
<ide> this.isConnected = connected;
<ide> }
<ide>
<del> public LocalDateTime getLastReported() {
<del> return this.lastReported;
<add> public boolean isAssigned() {
<add> return isAssigned;
<ide> }
<ide>
<del> public void setLastReported(LocalDateTime lastReported) {
<del> this.lastReported = lastReported;
<add> public void setAssigned(boolean assigned) {
<add> isAssigned = assigned;
<add> }
<add>
<add> public LocalDateTime getLastReportedTime() {
<add> return this.lastReportedTime;
<add> }
<add>
<add> public void setLastReportedTime(LocalDateTime lastReportedTime) {
<add> this.lastReportedTime = lastReportedTime;
<ide> }
<ide> } |
|
Java | apache-2.0 | 8e3b05f2ab3f98c531d5d45e9a54307657d9add4 | 0 | ST-DDT/CrazyCore | package de.st_ddt.crazyplugin;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.URL;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import org.bukkit.Bukkit;
import org.bukkit.command.CommandSender;
import org.bukkit.command.PluginCommand;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.entity.Player;
import de.st_ddt.crazyplugin.commands.CrazyCommandTreeExecutor;
import de.st_ddt.crazyplugin.commands.CrazyPluginCommandMainTree;
import de.st_ddt.crazyplugin.tasks.LanguageLoadTask;
import de.st_ddt.crazyutil.ChatHelper;
import de.st_ddt.crazyutil.ChatHelperExtended;
import de.st_ddt.crazyutil.CrazyLogger;
import de.st_ddt.crazyutil.ListFormat;
import de.st_ddt.crazyutil.VersionComparator;
import de.st_ddt.crazyutil.locales.CrazyLocale;
import de.st_ddt.crazyutil.locales.Localized;
import de.st_ddt.crazyutil.modules.permissions.PermissionModule;
public abstract class CrazyPlugin extends CrazyLightPlugin implements CrazyPluginInterface
{
private static final LinkedHashMap<Class<? extends CrazyPlugin>, CrazyPlugin> plugins = new LinkedHashMap<Class<? extends CrazyPlugin>, CrazyPlugin>();
protected final CrazyLogger logger = new CrazyLogger(this);
protected final CrazyCommandTreeExecutor<CrazyPluginInterface> mainCommand = new CrazyPluginCommandMainTree(this);
protected CrazyLocale locale = null;
protected String previousVersion = "0";
protected String updateVersion = "0";
protected boolean isUpdated = false;
protected boolean isInstalled = false;
public static Collection<CrazyPlugin> getCrazyPlugins()
{
return plugins.values();
}
public final static CrazyPlugin getPlugin(final Class<? extends CrazyPlugin> plugin)
{
return plugins.get(plugin);
}
public final static CrazyPlugin getPlugin(final String name)
{
for (final CrazyPlugin plugin : plugins.values())
if (plugin.getName().equalsIgnoreCase(name))
return plugin;
return null;
}
@Override
public final boolean isInstalled()
{
return isInstalled;
}
@Override
public final boolean isUpdated()
{
return isUpdated;
}
@Override
public CrazyCommandTreeExecutor<CrazyPluginInterface> getMainCommand()
{
return mainCommand;
}
@Override
public void onLoad()
{
plugins.put(this.getClass(), this);
getDataFolder().mkdir();
new File(getDataFolder().getPath() + "/lang").mkdirs();
checkLocale();
final ConfigurationSection config = getConfig();
previousVersion = config.getString("version", "0");
isInstalled = previousVersion.equals("0");
isUpdated = !previousVersion.equals(getDescription().getVersion());
config.set("version", getDescription().getVersion());
super.onLoad();
}
@Override
@Localized("CRAZYPLUGIN.UPDATED $Name$ $Version$")
public void onEnable()
{
if (isUpdated)
broadcastLocaleMessage("UPDATED", getName(), getDescription().getVersion());
load();
if (isUpdated)
save();
super.onEnable();
final PluginCommand command = getCommand(getName());
if (command != null)
command.setExecutor(mainCommand);
}
@Override
public void onDisable()
{
save();
super.onDisable();
}
@Override
public void load()
{
loadConfiguration();
}
@Override
public void loadConfiguration()
{
}
@Override
public void save()
{
saveConfiguration();
}
@Override
public void saveConfiguration()
{
logger.save(getConfig(), "logs.");
saveConfig();
}
@SuppressWarnings("deprecation")
@Override
@Localized("CRAZYPLUGIN.PLUGININFO.UPDATE $NewVersion$")
public void show(final CommandSender target, final String chatHeader, final boolean showDetailed)
{
super.show(target, chatHeader, showDetailed);
final CrazyLocale locale = CrazyLocale.getLocaleHead().getSecureLanguageEntry("CRAZYPLUGIN.PLUGININFO");
Bukkit.getScheduler().scheduleAsyncDelayedTask(this, new Runnable()
{
@Override
public void run()
{
if (checkForUpdate(false))
ChatHelper.sendMessage(target, chatHeader, locale.getLanguageEntry("UPDATE"), updateVersion);
}
});
}
public void checkLocale()
{
locale = CrazyLocale.getPluginHead(this);
locale.setAlternative(CrazyLocale.getLocaleHead().getLanguageEntry("CRAZYPLUGIN"));
}
@Override
public final void sendLocaleMessage(final String localepath, final CommandSender target, final Object... args)
{
sendLocaleMessage(getLocale().getLanguageEntry(localepath), target, args);
}
@Override
public final void sendLocaleMessage(final CrazyLocale locale, final CommandSender target, final Object... args)
{
ChatHelper.sendMessage(target, getChatHeader(), locale, args);
}
@Override
public final void sendLocaleMessage(final String localepath, final CommandSender[] targets, final Object... args)
{
sendLocaleMessage(getLocale().getLanguageEntry(localepath), targets, args);
}
@Override
public final void sendLocaleMessage(final CrazyLocale locale, final CommandSender[] targets, final Object... args)
{
ChatHelper.sendMessage(targets, getChatHeader(), locale, args);
}
@Override
public final void sendLocaleMessage(final String localepath, final Collection<? extends CommandSender> targets, final Object... args)
{
sendLocaleMessage(getLocale().getLanguageEntry(localepath), targets, args);
}
@Override
public final void sendLocaleMessage(final CrazyLocale locale, final Collection<? extends CommandSender> targets, final Object... args)
{
ChatHelper.sendMessage(targets, getChatHeader(), locale, args);
}
@Override
public void sendLocaleList(final CommandSender target, final ListFormat format, final int amount, final int page, final List<?> datas)
{
ChatHelperExtended.sendList(target, getChatHeader(), format, amount, page, datas);
}
@Override
public final void sendLocaleList(final CommandSender target, final String formatPath, final int amount, final int page, final List<?> datas)
{
sendLocaleList(target, formatPath + ".HEADER", formatPath + ".LISTFORMAT", formatPath + ".ENTRYFORMAT", amount, page, datas);
}
@Override
public final void sendLocaleList(final CommandSender target, final String headFormatPath, final String listFormatPath, final String entryFormatPath, final int amount, final int page, final List<?> datas)
{
CrazyLocale headFormat = null;
if (headFormatPath != null)
headFormat = getLocale().getLanguageEntry(headFormatPath);
CrazyLocale listFormat = null;
if (listFormatPath != null)
listFormat = getLocale().getLanguageEntry(listFormatPath);
CrazyLocale entryFormat = null;
if (entryFormatPath != null)
entryFormat = getLocale().getLanguageEntry(entryFormatPath);
sendLocaleList(target, headFormat, listFormat, entryFormat, amount, page, datas);
}
@Override
@Localized({ "CRAZYPLUGIN.LIST.HEADER $CurrentPage$ $MaxPage$ $ChatHeader$ $DateTime$", "CRAZYPLUGIN.LIST.LISTFORMAT $Index$ $Entry$ $ChatHeader$", "CRAZYPLUGIN.LIST.ENTRYFORMAT" })
public final void sendLocaleList(final CommandSender target, CrazyLocale headFormat, CrazyLocale listFormat, CrazyLocale entryFormat, final int amount, final int page, final List<?> datas)
{
if (headFormat == null)
headFormat = getLocale().getLanguageEntry("LIST.HEADER");
if (listFormat == null)
listFormat = getLocale().getLanguageEntry("LIST.LISTFORMAT");
if (entryFormat == null)
entryFormat = getLocale().getLanguageEntry("LIST.ENTRYFORMAT");
ChatHelperExtended.sendList(target, getChatHeader(), headFormat.getLanguageText(target), listFormat.getLanguageText(target), entryFormat.getLanguageText(target), amount, page, datas);
}
@Override
public final void broadcastLocaleMessage(final String localepath, final Object... args)
{
broadcastLocaleMessage(getLocale().getLanguageEntry(localepath), args);
}
@Override
public final void broadcastLocaleMessage(final CrazyLocale locale, final Object... args)
{
sendLocaleMessage(locale, Bukkit.getConsoleSender(), args);
sendLocaleMessage(locale, Bukkit.getOnlinePlayers(), args);
}
@Override
public final void broadcastLocaleMessage(final boolean console, final String permission, final String localepath, final Object... args)
{
broadcastLocaleMessage(console, permission, getLocale().getLanguageEntry(localepath), args);
}
@Override
public final void broadcastLocaleMessage(final boolean console, final String permission, final CrazyLocale locale, final Object... args)
{
if (permission == null)
broadcastLocaleMessage(console, new String[] {}, locale, args);
else
broadcastLocaleMessage(console, new String[] { permission }, locale, args);
}
@Override
public final void broadcastLocaleMessage(final boolean console, final String[] permissions, final String localepath, final Object... args)
{
broadcastLocaleMessage(console, permissions, getLocale().getLanguageEntry(localepath), args);
}
@Override
public final void broadcastLocaleMessage(final boolean console, final String[] permissions, final CrazyLocale locale, final Object... args)
{
if (console)
sendLocaleMessage(locale, Bukkit.getConsoleSender(), args);
Player: for (final Player player : Bukkit.getOnlinePlayers())
{
for (final String permission : permissions)
if (!PermissionModule.hasPermission(player, permission))
continue Player;
sendLocaleMessage(locale, player, args);
}
}
@Override
public final CrazyLogger getCrazyLogger()
{
return logger;
}
@Override
public final CrazyLocale getLocale()
{
return locale;
}
protected boolean isSupportingLanguages()
{
return true;
}
public final void loadLanguage(final String language)
{
loadLanguage(language, Bukkit.getConsoleSender());
}
@SuppressWarnings("deprecation")
public void loadLanguageDelayed(final String language, final CommandSender sender)
{
getServer().getScheduler().scheduleAsyncDelayedTask(this, new LanguageLoadTask(this, language, sender));
}
@Localized({ "CRAZYPLUGIN.LANGUAGE.ERROR.AVAILABLE $Language$ $Plugin$", "CRAZYPLUGIN.LANGUAGE.ERROR.READ $Language$ $Plugin$" })
public void loadLanguage(final String language, final CommandSender sender)
{
if (!isSupportingLanguages())
return;
// default files
File file = new File(getDataFolder().getPath() + "/lang/" + language + ".lang");
if (!file.exists())
{
downloadLanguage(language);
if (!file.exists())
{
unpackLanguage(language);
if (!file.exists())
{
sendLocaleMessage("LANGUAGE.ERROR.AVAILABLE", sender, language, getName());
return;
}
}
}
try
{
loadLanguageFile(language, file);
}
catch (final IOException e)
{
sendLocaleMessage("LANGUAGE.ERROR.READ", sender, language, getName());
}
// Custom files:
file = new File(getDataFolder().getPath() + "/lang/custom_" + language + ".lang");
if (file.exists())
try
{
loadLanguageFile(language, file);
}
catch (final IOException e)
{
sendLocaleMessage("LANGUAGE.ERROR.READ", sender, language + " (Custom)", getName());
}
}
public String getMainDownloadLocation()
{
return "https://raw.github.com/ST-DDT/Crazy/master/" + getName() + "/src/resource";
}
public final void downloadLanguage(final String language)
{
downloadLanguage(language, Bukkit.getConsoleSender());
}
@Localized("CRAZYPLUGIN.LANGUAGE.ERROR.DOWNLOAD $Language$ $Plugin$")
public void downloadLanguage(final String language, final CommandSender sender)
{
try
{
BufferedInputStream in = null;
FileOutputStream out = null;
try
{
final InputStream stream = new URL(getMainDownloadLocation() + "/lang/" + language + ".lang").openStream();
if (stream == null)
return;
in = new BufferedInputStream(stream);
out = new FileOutputStream(getDataFolder().getPath() + "/lang/" + language + ".lang");
final byte data[] = new byte[1024];
int count;
while ((count = in.read(data, 0, 1024)) != -1)
out.write(data, 0, count);
out.flush();
}
finally
{
if (in != null)
in.close();
if (out != null)
out.close();
}
}
catch (final IOException e)
{
sendLocaleMessage("LANGUAGE.ERROR.DOWNLOAD", sender, language, getName());
}
}
public final void updateLanguage(final String language, final boolean reload)
{
updateLanguage(language, Bukkit.getConsoleSender(), reload);
}
@Localized({ "CRAZYPLUGIN.LANGUAGE.ERROR.AVAILABLE $Language$ $Plugin$", "CRAZYPLUGIN.LANGUAGE.ERROR.READ $Language$ $Plugin$" })
public void updateLanguage(final String language, final CommandSender sender, final boolean reload)
{
if (!isSupportingLanguages())
return;
final File file = new File(getDataFolder().getPath() + "/lang/" + language + ".lang");
downloadLanguage(language);
if (!file.exists())
{
unpackLanguage(language);
if (!file.exists())
{
sendLocaleMessage("LANGUAGE.ERROR.AVAILABLE", sender, language, getName());
return;
}
}
if (reload)
try
{
loadLanguageFile(language, file);
}
catch (final IOException e)
{
sendLocaleMessage("LANGUAGE.ERROR.READ", sender, language, getName());
}
}
public void unpackLanguage(final String language)
{
unpackLanguage(language, getServer().getConsoleSender());
}
@Localized("CRAZYPLUGIN.LANGUAGE.ERROR.EXTRACT $Language$ $Plugin$")
public void unpackLanguage(final String language, final CommandSender sender)
{
try
{
InputStream stream = null;
InputStream in = null;
OutputStream out = null;
try
{
stream = getClass().getResourceAsStream("/resource/lang/" + language + ".lang");
if (stream == null)
return;
in = new BufferedInputStream(stream);
out = new BufferedOutputStream(new FileOutputStream(getDataFolder().getPath() + "/lang/" + language + ".lang"));
final byte data[] = new byte[1024];
int count;
while ((count = in.read(data, 0, 1024)) != -1)
out.write(data, 0, count);
out.flush();
}
finally
{
if (out != null)
out.close();
if (stream != null)
stream.close();
if (in != null)
in.close();
}
}
catch (final IOException e)
{
sendLocaleMessage("LANGUAGE.ERROR.EXTRACT", sender, language, getName());
}
}
public void loadLanguageFile(final String language, final File file) throws IOException
{
InputStream stream = null;
InputStreamReader reader = null;
try
{
stream = new FileInputStream(file);
reader = new InputStreamReader(stream, "UTF-8");
CrazyLocale.readFile(language, reader);
}
finally
{
if (reader != null)
reader.close();
if (stream != null)
stream.close();
}
}
@Override
public String getUpdateVersion()
{
return updateVersion;
}
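/**
 * Checks the plugin's dev.bukkit.org files RSS feed for a newer release.
 * Unless {@code force} is set, a previously fetched result may be reused.
 */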
@Override
public boolean checkForUpdate(final boolean force)
{
if (!force)
{
if (updateVersion == null)
return false;
if (!updateVersion.equals("0"))
{
final int value = VersionComparator.compareVersions(this, updateVersion);
if (value == 1)
return false;
else if (value == -1)
return true;
}
}
try
{
BufferedReader bufreader = null;
try
{
final InputStream stream = new URL("http://dev.bukkit.org/server-mods/" + getName().toLowerCase() + "/files.rss").openStream();
if (stream == null)
{
updateVersion = null;
return false;
}
bufreader = new BufferedReader(new InputStreamReader(stream, "UTF-8"));
String zeile = null;
boolean active = false;
while ((zeile = bufreader.readLine()) != null)
{
zeile = zeile.trim();
if (active && zeile.startsWith("<title>"))
{
updateVersion = zeile.substring(7 + getName().length() + 2).split("<")[0];
break;
}
else if (zeile.equals("<item>"))
active = true;
else
continue;
}
}
finally
{
if (bufreader != null)
bufreader.close();
}
}
catch (final Exception e)
{
updateVersion = null;
return false;
}
return VersionComparator.compareVersions(this, updateVersion) == -1;
}
}
| src/de/st_ddt/crazyplugin/CrazyPlugin.java | package de.st_ddt.crazyplugin;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.URL;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import org.bukkit.Bukkit;
import org.bukkit.command.CommandSender;
import org.bukkit.command.PluginCommand;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.entity.Player;
import de.st_ddt.crazyplugin.commands.CrazyCommandTreeExecutor;
import de.st_ddt.crazyplugin.commands.CrazyPluginCommandMainTree;
import de.st_ddt.crazyplugin.tasks.LanguageLoadTask;
import de.st_ddt.crazyutil.ChatHelper;
import de.st_ddt.crazyutil.ChatHelperExtended;
import de.st_ddt.crazyutil.CrazyLogger;
import de.st_ddt.crazyutil.ListFormat;
import de.st_ddt.crazyutil.VersionComparator;
import de.st_ddt.crazyutil.locales.CrazyLocale;
import de.st_ddt.crazyutil.locales.Localized;
import de.st_ddt.crazyutil.modules.permissions.PermissionModule;
public abstract class CrazyPlugin extends CrazyLightPlugin implements CrazyPluginInterface
{
private static final LinkedHashMap<Class<? extends CrazyPlugin>, CrazyPlugin> plugins = new LinkedHashMap<Class<? extends CrazyPlugin>, CrazyPlugin>();
protected final CrazyLogger logger = new CrazyLogger(this);
protected final CrazyCommandTreeExecutor<CrazyPluginInterface> mainCommand = new CrazyPluginCommandMainTree(this);
protected CrazyLocale locale = null;
protected String previousVersion = "0";
protected String updateVersion = "0";
protected boolean isUpdated = false;
protected boolean isInstalled = false;
public static Collection<CrazyPlugin> getCrazyPlugins()
{
return plugins.values();
}
public final static CrazyPlugin getPlugin(final Class<? extends CrazyPlugin> plugin)
{
return plugins.get(plugin);
}
public final static CrazyPlugin getPlugin(final String name)
{
for (final CrazyPlugin plugin : plugins.values())
if (plugin.getName().equalsIgnoreCase(name))
return plugin;
return null;
}
@Override
public final boolean isInstalled()
{
return isInstalled;
}
@Override
public final boolean isUpdated()
{
return isUpdated;
}
@Override
public CrazyCommandTreeExecutor<CrazyPluginInterface> getMainCommand()
{
return mainCommand;
}
@Override
public void onLoad()
{
plugins.put(this.getClass(), this);
getDataFolder().mkdir();
new File(getDataFolder().getPath() + "/lang").mkdirs();
checkLocale();
final ConfigurationSection config = getConfig();
previousVersion = config.getString("version", "0");
isInstalled = previousVersion.equals("0");
isUpdated = !previousVersion.equals(getDescription().getVersion());
config.set("version", getDescription().getVersion());
super.onLoad();
}
@Override
@Localized("CRAZYPLUGIN.UPDATED $Name$ $Version$")
public void onEnable()
{
if (isUpdated)
broadcastLocaleMessage("UPDATED", getName(), getDescription().getVersion());
load();
if (isUpdated)
save();
super.onEnable();
final PluginCommand command = getCommand(getName());
if (command != null)
command.setExecutor(mainCommand);
}
@Override
public void onDisable()
{
save();
super.onDisable();
}
@Override
public void load()
{
loadConfiguration();
}
@Override
public void loadConfiguration()
{
}
@Override
public void save()
{
saveConfiguration();
}
@Override
public void saveConfiguration()
{
logger.save(getConfig(), "logs.");
saveConfig();
}
@SuppressWarnings("deprecation")
@Override
@Localized("CRAZYPLUGIN.PLUGININFO.UPDATE $NewVersion$")
public void show(final CommandSender target, final String chatHeader, final boolean showDetailed)
{
super.show(target, chatHeader, showDetailed);
final CrazyLocale locale = CrazyLocale.getLocaleHead().getSecureLanguageEntry("CRAZYPLUGIN.PLUGININFO");
Bukkit.getScheduler().scheduleAsyncDelayedTask(this, new Runnable()
{
@Override
public void run()
{
if (checkForUpdate(false))
ChatHelper.sendMessage(target, chatHeader, locale.getLanguageEntry("UPDATE"), updateVersion);
}
});
}
public void checkLocale()
{
locale = CrazyLocale.getPluginHead(this);
locale.setAlternative(CrazyLocale.getLocaleHead().getLanguageEntry("CRAZYPLUGIN"));
}
@Override
public final void sendLocaleMessage(final String localepath, final CommandSender target, final Object... args)
{
sendLocaleMessage(getLocale().getLanguageEntry(localepath), target, args);
}
@Override
public final void sendLocaleMessage(final CrazyLocale locale, final CommandSender target, final Object... args)
{
ChatHelper.sendMessage(target, getChatHeader(), locale, args);
}
@Override
public final void sendLocaleMessage(final String localepath, final CommandSender[] targets, final Object... args)
{
sendLocaleMessage(getLocale().getLanguageEntry(localepath), targets, args);
}
@Override
public final void sendLocaleMessage(final CrazyLocale locale, final CommandSender[] targets, final Object... args)
{
ChatHelper.sendMessage(targets, getChatHeader(), locale, args);
}
@Override
public final void sendLocaleMessage(final String localepath, final Collection<? extends CommandSender> targets, final Object... args)
{
sendLocaleMessage(getLocale().getLanguageEntry(localepath), targets, args);
}
@Override
public final void sendLocaleMessage(final CrazyLocale locale, final Collection<? extends CommandSender> targets, final Object... args)
{
ChatHelper.sendMessage(targets, getChatHeader(), locale, args);
}
@Override
public void sendLocaleList(final CommandSender target, final ListFormat format, final int amount, final int page, final List<?> datas)
{
ChatHelperExtended.sendList(target, getChatHeader(), format, amount, page, datas);
}
@Override
public final void sendLocaleList(final CommandSender target, final String formatPath, final int amount, final int page, final List<?> datas)
{
sendLocaleList(target, formatPath + ".HEADER", formatPath + ".LISTFORMAT", formatPath + ".ENTRYFORMAT", amount, page, datas);
}
@Override
public final void sendLocaleList(final CommandSender target, final String headFormatPath, final String listFormatPath, final String entryFormatPath, final int amount, final int page, final List<?> datas)
{
CrazyLocale headFormat = null;
if (headFormatPath != null)
headFormat = getLocale().getLanguageEntry(headFormatPath);
CrazyLocale listFormat = null;
if (listFormatPath != null)
listFormat = getLocale().getLanguageEntry(listFormatPath);
CrazyLocale entryFormat = null;
if (entryFormatPath != null)
entryFormat = getLocale().getLanguageEntry(entryFormatPath);
sendLocaleList(target, headFormat, listFormat, entryFormat, amount, page, datas);
}
@Override
@Localized({ "CRAZYPLUGIN.LIST.HEADER $CurrentPage$ $MaxPage$ $ChatHeader$ $DateTime$", "CRAZYPLUGIN.LIST.LISTFORMAT $Index$ $Entry$ $ChatHeader$", "CRAZYPLUGIN.LIST.ENTRYFORMAT" })
public final void sendLocaleList(final CommandSender target, CrazyLocale headFormat, CrazyLocale listFormat, CrazyLocale entryFormat, final int amount, final int page, final List<?> datas)
{
if (headFormat == null)
headFormat = getLocale().getLanguageEntry("LIST.HEADER");
if (listFormat == null)
listFormat = getLocale().getLanguageEntry("LIST.LISTFORMAT");
if (entryFormat == null)
entryFormat = getLocale().getLanguageEntry("LIST.ENTRYFORMAT");
ChatHelperExtended.sendList(target, getChatHeader(), headFormat.getLanguageText(target), listFormat.getLanguageText(target), entryFormat.getLanguageText(target), amount, page, datas);
}
@Override
public final void broadcastLocaleMessage(final String localepath, final Object... args)
{
broadcastLocaleMessage(getLocale().getLanguageEntry(localepath), args);
}
@Override
public final void broadcastLocaleMessage(final CrazyLocale locale, final Object... args)
{
sendLocaleMessage(locale, Bukkit.getConsoleSender(), args);
sendLocaleMessage(locale, Bukkit.getOnlinePlayers(), args);
}
@Override
public final void broadcastLocaleMessage(final boolean console, final String permission, final String localepath, final Object... args)
{
broadcastLocaleMessage(console, permission, getLocale().getLanguageEntry(localepath), args);
}
@Override
public final void broadcastLocaleMessage(final boolean console, final String permission, final CrazyLocale locale, final Object... args)
{
if (permission == null)
broadcastLocaleMessage(console, new String[] {}, locale, args);
else
broadcastLocaleMessage(console, new String[] { permission }, locale, args);
}
@Override
public final void broadcastLocaleMessage(final boolean console, final String[] permissions, final String localepath, final Object... args)
{
broadcastLocaleMessage(console, permissions, getLocale().getLanguageEntry(localepath), args);
}
@Override
public final void broadcastLocaleMessage(final boolean console, final String[] permissions, final CrazyLocale locale, final Object... args)
{
if (console)
sendLocaleMessage(locale, Bukkit.getConsoleSender(), args);
Player: for (final Player player : Bukkit.getOnlinePlayers())
{
for (final String permission : permissions)
if (!PermissionModule.hasPermission(player, permission))
continue Player;
sendLocaleMessage(locale, player, args);
}
}
@Override
public final CrazyLogger getCrazyLogger()
{
return logger;
}
@Override
public final CrazyLocale getLocale()
{
return locale;
}
protected boolean isSupportingLanguages()
{
return true;
}
public final void loadLanguage(final String language)
{
loadLanguage(language, Bukkit.getConsoleSender());
}
@SuppressWarnings("deprecation")
public void loadLanguageDelayed(final String language, final CommandSender sender)
{
getServer().getScheduler().scheduleAsyncDelayedTask(this, new LanguageLoadTask(this, language, sender));
}
@Localized({ "CRAZYPLUGIN.LANGUAGE.ERROR.AVAILABLE $Language$ $Plugin$", "CRAZYPLUGIN.LANGUAGE.ERROR.READ $Language$ $Plugin$" })
public void loadLanguage(final String language, final CommandSender sender)
{
if (!isSupportingLanguages())
return;
// default files
File file = new File(getDataFolder().getPath() + "/lang/" + language + ".lang");
if (!file.exists())
{
downloadLanguage(language);
if (!file.exists())
{
unpackLanguage(language);
if (!file.exists())
{
sendLocaleMessage("LANGUAGE.ERROR.AVAILABLE", sender, language, getName());
return;
}
}
}
try
{
loadLanguageFile(language, file);
}
catch (final IOException e)
{
sendLocaleMessage("LANGUAGE.ERROR.READ", sender, language, getName());
}
// Custom files:
file = new File(getDataFolder().getPath() + "/lang/custom_" + language + ".lang");
if (file.exists())
try
{
loadLanguageFile(language, file);
}
catch (final IOException e)
{
sendLocaleMessage("LANGUAGE.ERROR.READ", sender, language + " (Custom)", getName());
}
}
public String getMainDownloadLocation()
{
return "https://raw.github.com/ST-DDT/Crazy/master/" + getName() + "/src/resource";
}
public final void downloadLanguage(final String language)
{
downloadLanguage(language, Bukkit.getConsoleSender());
}
@Localized("CRAZYPLUGIN.LANGUAGE.ERROR.DOWNLOAD $Language$ $Plugin$")
public void downloadLanguage(final String language, final CommandSender sender)
{
try
{
BufferedInputStream in = null;
FileOutputStream out = null;
try
{
final InputStream stream = new URL(getMainDownloadLocation() + "/lang/" + language + ".lang").openStream();
if (stream == null)
return;
in = new BufferedInputStream(stream);
out = new FileOutputStream(getDataFolder().getPath() + "/lang/" + language + ".lang");
final byte data[] = new byte[1024];
int count;
while ((count = in.read(data, 0, 1024)) != -1)
out.write(data, 0, count);
out.flush();
}
finally
{
if (in != null)
in.close();
if (out != null)
out.close();
}
}
catch (final IOException e)
{
sendLocaleMessage("LANGUAGE.ERROR.DOWNLOAD", sender, language, getName());
}
}
public final void updateLanguage(final String language, final boolean reload)
{
updateLanguage(language, Bukkit.getConsoleSender(), reload);
}
@Localized({ "CRAZYPLUGIN.LANGUAGE.ERROR.AVAILABLE $Language$ $Plugin$", "CRAZYPLUGIN.LANGUAGE.ERROR.READ $Language$ $Plugin$" })
public void updateLanguage(final String language, final CommandSender sender, final boolean reload)
{
if (!isSupportingLanguages())
return;
final File file = new File(getDataFolder().getPath() + "/lang/" + language + ".lang");
downloadLanguage(language);
if (!file.exists())
{
unpackLanguage(language);
if (!file.exists())
{
sendLocaleMessage("LANGUAGE.ERROR.AVAILABLE", sender, language, getName());
return;
}
}
if (reload)
try
{
loadLanguageFile(language, file);
}
catch (final IOException e)
{
sendLocaleMessage("LANGUAGE.ERROR.READ", sender, language, getName());
}
}
public void unpackLanguage(final String language)
{
unpackLanguage(language, getServer().getConsoleSender());
}
@Localized("CRAZYPLUGIN.LANGUAGE.ERROR.EXTRACT $Language$ $Plugin$")
public void unpackLanguage(final String language, final CommandSender sender)
{
try
{
InputStream stream = null;
InputStream in = null;
OutputStream out = null;
try
{
stream = getClass().getResourceAsStream("/resource/lang/" + language + ".lang");
if (stream == null)
return;
in = new BufferedInputStream(stream);
out = new BufferedOutputStream(new FileOutputStream(getDataFolder().getPath() + "/lang/" + language + ".lang"));
final byte data[] = new byte[1024];
int count;
while ((count = in.read(data, 0, 1024)) != -1)
out.write(data, 0, count);
out.flush();
}
finally
{
if (out != null)
out.close();
if (stream != null)
stream.close();
if (in != null)
in.close();
}
}
catch (final IOException e)
{
sendLocaleMessage("LANGUAGE.ERROR.EXTRACT", sender, language, getName());
}
}
public void loadLanguageFile(final String language, final File file) throws IOException
{
InputStream stream = null;
InputStreamReader reader = null;
try
{
stream = new FileInputStream(file);
reader = new InputStreamReader(stream, "UTF-8");
CrazyLocale.readFile(language, reader);
}
finally
{
if (reader != null)
reader.close();
if (stream != null)
stream.close();
}
}
@Override
public String getUpdateVersion()
{
return updateVersion;
}
@Override
public boolean checkForUpdate(final boolean force)
{
if (!force)
{
if (updateVersion == null)
return false;
if (!updateVersion.equals("0"))
{
final int value = VersionComparator.compareVersions(this, updateVersion);
if (value == 1)
return false;
else if (value == -1)
return true;
}
}
try
{
BufferedReader bufreader = null;
try
{
final InputStream stream = new URL("http://dev.bukkit.org/server-mods/" + getName().toLowerCase() + "/files.rss").openStream();
if (stream == null)
{
updateVersion = null;
return false;
}
bufreader = new BufferedReader(new InputStreamReader(stream, "UTF-8"));
String zeile = null;
boolean active = false;
while ((zeile = bufreader.readLine()) != null)
{
zeile = zeile.trim();
if (active)
{
updateVersion = zeile.substring(7 + getName().length() + 2).split("<")[0];
break;
}
else if (zeile.equals("<item>"))
active = true;
else
continue;
}
}
finally
{
if (bufreader != null)
bufreader.close();
}
}
catch (final Exception e)
{
updateVersion = null;
return false;
}
return VersionComparator.compareVersions(this, updateVersion) == -1;
}
}
| CrazyCore: fixed issue when no file uploads are found | src/de/st_ddt/crazyplugin/CrazyPlugin.java | CrazyCore: fixed issue when no file uploads are found | <ide><path>rc/de/st_ddt/crazyplugin/CrazyPlugin.java
<ide> while ((zeile = bufreader.readLine()) != null)
<ide> {
<ide> zeile = zeile.trim();
<del> if (active)
<add> if (active && zeile.startsWith("<title>"))
<ide> {
<ide> updateVersion = zeile.substring(7 + getName().length() + 2).split("<")[0];
<ide> break; |
|
JavaScript | mit | b5ab7f56df5bcc9d6459b59c9116e2256eaaf5be | 0 | dbsoftcombr/dbsfaces,dbsoftcombr/dbsfaces,dbsoftcombr/dbsfaces | (function($){
var $chk = function(obj){
return !!(obj || obj === 0);
};
var dbsmask = function(){
this.initialize.apply(this, arguments);
};
dbsmask.prototype = {
options: {
maskEmptyChr : ' ',
validNumbers : "1234567890",
validAlphas : "abcdefghijklmnopqrstuvwxyz",
validAlphaNums : "abcdefghijklmnopqrstuvwxyz1234567890",
groupDigits : 3,
decDigits : 2,
currencySymbol : '',
groupSymbol : ',',
decSymbol : '.',
showMask : true,
stripMask : false,
lastFocus : 0,
oldValue : '',
number : {
stripMask : false,
showMask : false
}
},
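// Stores the node, merges the type-specific option overrides and binds the mask's DOM event handlers.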
initialize: function(node, options) {
this.node = node;
this.domNode = node[0];
this.options = $.extend({}, this.options, this.options[options.type] || {}, options);
var self = this;
this.node
// .bind( "mousedown click", function(ev){ ev.stopPropagation(); ev.preventDefault(); } )
.bind( "mouseup", function(){ self.onMouseUp .apply(self, arguments); } )
.bind( "keypress", function(){ self.onKeyPress.apply(self, arguments); } )
.bind( "keydown", function(){ self.onKeyDown .apply(self, arguments); } )
.bind( "focus", function(){ self.onFocus .apply(self, arguments); } )
.bind( "blur", function(){ self.onBlur .apply(self, arguments); } );
},
isFixed : function(){ return this.options.type == 'fixed'; },
isNumber : function(){ return this.options.type == 'number'; },
onMouseUp: function( ev ) {
ev.stopPropagation();
ev.preventDefault();
//
if( this.isFixed() ) {
var p = this.getSelectionStart();
this.setSelection(p, (p + 1));
} else if(this.isNumber() ) {
// this.setEnd();
}
},
onKeyDown: function(ev) {
if (ev.dbs){return;}
if (ev.altKey && ev.which == 18){ //Option + Delete
ev.preventDefault();
return;
}else if(ev.ctrlKey || ev.altKey || ev.metaKey) {
return;
} else if(ev.which == 13) { // enter
// this.node.blur(); Comentado em 6/ago/2013 para evitar salto do campo e o submit
// this.submitForm(this.node);
} else if(!(ev.which == 9)) { // se não tab
if(this.options.type == "fixed") {
ev.preventDefault();
var p = this.getSelectionStart();
switch(ev.which) {
case 8: // Backspace
this.updateSelection( this.options.maskEmptyChr );
this.selectPrevious();
break;
case 36: // Home
this.selectFirst();
break;
case 35: // End
this.selectLast();
break;
case 37: // Left
case 38: // Up
this.selectPrevious();
break;
case 39: // Right
case 40: // Down
this.selectNext();
break;
case 46: // Delete
this.updateSelection( this.options.maskEmptyChr );
this.selectNext();
break;
case 173: // MENOS
this.setSignal(ev);
break;
case 189: // MENOS
this.setSignal(ev);
break;
default:
var chr = this.chrFromEv(ev);
if( this.isViableInput( p, chr ) ) {
this.updateSelection( ev.shiftKey ? chr.toUpperCase() : chr );
this.node.trigger("valid", ev, this.node);
this.selectNext();
} else {
this.node.trigger("invalid", ev, this.node);
}
break;
}
} else if(this.options.type == "number") {
switch(ev.which) {
case 16: // END
case 35: // END
case 36: // HOME
case 37: // LEFT
case 38: // UP
case 39: // RIGHT
case 40: // DOWN
break;
case 8: // backspace
case 46: // delete
var xStart = this.getSelectionStart();
var xEnd = this.getSelectionEnd();
if (ev.which == 8){ //Se for backspace
//Se não houver seleção manual
if (xStart == xEnd){
//Se caracter anterior for pontuação, inclui seleção da pontuação para ser excluido também
var xChar = this.domNode.value.charAt(xStart - 1);
if (xChar == this.options.groupSymbol){
xStart-=2;
this.setSelection(xStart, xEnd);
}else if (xChar == this.options.decSymbol){
xStart-=1;
this.setSelection(xStart, xStart);
}
}
}
//Inclui zeros a direita se estiver na digitação das casas decimais
if (this.isInputDecimals(ev)){
var xZeros = "0";
for(var len = this.getSelectionStart() + 1, i = this.getSelectionEnd(); len < i; len++) {
xZeros += "0";
}
this.domNode.value += xZeros;
this.setSelection(xStart, xEnd);
}
var self = this;
setTimeout(function(){
self.formatNumber();
}, 1);
break;
case 173: // MENOS
this.setSignal(ev);
break;
case 189: // MENOS
this.setSignal(ev);
break;
case 188: // VIRGULA
this.moveToDecimalPosition(ev);
break;
case 190: // PONTO
this.moveToDecimalPosition(ev);
break;
default:
ev.preventDefault();
//Se campo inteiro estiver selecionado, apaga conteúdo e posiciona na parte inteira
if (this.domNode.value.length == (this.getSelectionEnd() - this.getSelectionStart())){
this.domNode.value = "";
this.formatNumber();
this.moveToIntegerPosition(ev);
}
if (this.isInputDecimals(ev)){
//Seleciona digito anterior para digitação caminha para a direita
var curpos = this.getSelectionStart();
if (curpos == this.getSelectionEnd()){
this.setSelection(curpos, curpos + 1);
}
};
var chr = this.chrFromEv( ev );
if( this.isViableInput( p, chr ) ) {
var range = new Range( this )
, val = this.sanityTest( range.replaceWith( chr ) );
if(val !== false){
this.updateSelection( chr );
this.formatNumber();
}
this.node.trigger( "valid", ev, this.node );
} else {
this.node.trigger( "invalid", ev, this.node );
}
break;
}
}
}
},
onKeyPress: function(ev) {
if (ev.altKey && ev.which == 18){ //Option + Delete
ev.preventDefault();
return;
}
var key = ev.which || ev.keyCode;
if(
!( this.allowKeys[ key ] )
&& !(ev.ctrlKey || ev.altKey || ev.metaKey)
) {
ev.preventDefault();
ev.stopPropagation();
}
},
allowKeys : {
8 : 1 // backspace
, 9 : 1 // tab
, 13 : 1 // enter
, 35 : 1 // end
, 36 : 1 // home
, 37 : 1 // left
, 38 : 1 // up
, 39 : 1 // right
, 40 : 1 // down
, 46 : 1 // delete
},
moveToDecimalPosition: function(ev){
ev.preventDefault();
if (this.options.decDigits > 0){
var curpos = this.domNode.value.indexOf(this.options.decSymbol) + 1;
this.setSelection(curpos, curpos);
}
},
moveToIntegerPosition: function(ev){
ev.preventDefault();
if (this.options.decDigits > 0){
var curpos = this.domNode.value.indexOf(this.options.decSymbol);
this.setSelection(curpos, curpos);
}
},
isInputDecimals: function(ev){
if (this.options.decDigits > 0){
var curpos = this.domNode.value.indexOf(this.options.decSymbol);
if (curpos < this.getSelectionStart()){
return true;
}else{
return false;
}
}
},
setSignal: function(ev){
ev.preventDefault();
ev.stopPropagation();
if ($(this.node).attr("minValue") < 0){
if ($(this.node).attr("n") == "-"){
$(this.node).attr("n","");
}else{
$(this.node).attr("n","-");
}
this.formatNumber();
}
},
onFocus: function(ev) {
//Salva valor atual para comparar com o novo
this.options.oldValue = this.domNode.value;
ev.stopPropagation();
ev.preventDefault();
this.options.showMask && (this.domNode.value = this.wearMask(this.domNode.value));
this.sanityTest( this.domNode.value );
var self = this;
setTimeout( function(){
self[ self.options.type === "fixed" ? 'selectFirst' : 'selectAll' ]();
}, 1 );
},
onBlur: function(ev) {
ev.stopPropagation();
ev.preventDefault();
if(this.options.stripMask){
this.domNode.value = this.stripMask();
}
//Dispara evento se valor foi alterado
if (this.options.oldValue != this.domNode.value){
$(this.node).trigger("change");
}
},
selectAll: function() {
this.setSelection(0, this.domNode.value.length);
},
selectFirst: function() {
for(var i = 0, len = this.options.mask.length; i < len; i++) {
if(this.isInputPosition(i)) {
this.setSelection(i, (i + 1));
return;
}
}
},
selectLast: function() {
for(var i = (this.options.mask.length - 1); i >= 0; i--) {
if(this.isInputPosition(i)) {
this.setSelection(i, (i + 1));
return;
}
}
},
selectPrevious: function(p) {
if( !$chk(p) ){ p = this.getSelectionStart(); }
if(p <= 0) {
this.selectFirst();
} else {
if(this.isInputPosition(p - 1)) {
this.setSelection(p - 1, p);
} else {
this.selectPrevious(p - 1);
}
}
},
selectNext: function(p) {
if( !$chk(p) ){ p = this.getSelectionEnd(); }
if( this.isNumber() ){
this.setSelection( p+1, p+1 );
return;
}
if( p >= this.options.mask.length) {
this.selectLast();
} else {
if(this.isInputPosition(p)) {
this.setSelection(p, (p + 1));
} else {
this.selectNext(p + 1);
}
}
},
setSelection: function( a, b ) {
a = a.valueOf();
if( !b && a.splice ){
b = a[1];
a = a[0];
}
if(this.domNode.setSelectionRange) {
this.domNode.focus();
this.domNode.setSelectionRange(a, b);
} else if(this.domNode.createTextRange) {
var r = this.domNode.createTextRange();
r.collapse();
r.moveStart("character", a);
r.moveEnd("character", (b - a));
r.select();
}
},
updateSelection: function( chr ) {
var value = this.domNode.value
, range = new Range( this )
, output = range.replaceWith( chr );
this.domNode.value = output;
if( range[0] === range[1] ){
this.setSelection( range[0] + 1, range[0] + 1 );
}else{
this.setSelection( range );
}
},
setEnd: function() {
var len = this.domNode.value.length - this.options.decDigits;
if (this.options.decDigits > 0){
len--;
}
this.setSelection(len, len);
},
getSelectionRange : function(){
return [ this.getSelectionStart(), this.getSelectionEnd() ];
},
getSelectionStart: function() {
var p = 0,
n = this.domNode.selectionStart;
if( n ) {
if( typeof( n ) == "number" ){
p = n;
}
} else if( document.selection ){
var r = document.selection.createRange().duplicate();
r.moveEnd( "character", this.domNode.value.length );
p = this.domNode.value.lastIndexOf( r.text );
if( r.text == "" ){
p = this.domNode.value.length;
}
}
return p;
},
getSelectionEnd: function() {
var p = 0,
n = this.domNode.selectionEnd;
if( n ) {
if( typeof( n ) == "number"){
p = n;
}
} else if( document.selection ){
var r = document.selection.createRange().duplicate();
r.moveStart( "character", -this.domNode.value.length );
p = r.text.length;
}
return p;
},
isInputPosition: function(p) {
var mask = this.options.mask.toLowerCase();
var chr = mask.charAt(p);
return !!~"9ax".indexOf(chr);
},
sanityTest: function( str, p ){
var sanity = this.options.sanity;
if(sanity instanceof RegExp){
return sanity.test(str);
}else if($.isFunction(sanity)){
var ret = sanity(str, p);
if(typeof(ret) == 'boolean'){
return ret;
}else if(typeof(ret) != 'undefined'){
if( this.isFixed() ){
var p = this.getSelectionStart();
this.domNode.value = this.wearMask( ret );
this.setSelection( p, p+1 );
this.selectNext();
}else if( this.isNumber() ){
var range = new Range( this );
this.domNode.value = ret;
this.setSelection( range );
this.formatNumber();
}
return false;
}
}
},
isViableInput: function() {
return this[ this.isFixed() ? 'isViableFixedInput' : 'isViableNumericInput' ].apply( this, arguments );
},
isViableFixedInput : function( p, chr ){
var mask = this.options.mask.toLowerCase();
var chMask = mask.charAt(p);
var val = this.domNode.value.split('');
val.splice( p, 1, chr );
val = val.join('');
var ret = this.sanityTest( val, p );
if(typeof(ret) == 'boolean'){ return ret; }
if(({
'9' : this.options.validNumbers,
'a' : this.options.validAlphas,
'x' : this.options.validAlphaNums
}[chMask] || '').indexOf(chr) >= 0){
return true;
}
return false;
},
isViableNumericInput : function( p, chr ){
return !!~this.options.validNumbers.indexOf( chr );
},
wearMask: function(str) {
var mask = this.options.mask.toLowerCase()
, output = ""
, chrSets = {
'9' : 'validNumbers'
, 'a' : 'validAlphas'
, 'x' : 'validAlphaNums'
};
for(var i = 0, u = 0, len = mask.length; i < len; i++) {
switch(mask.charAt(i)) {
case '9':
case 'a':
case 'x':
output +=
((this.options[ chrSets[ mask.charAt(i) ] ].indexOf( str.charAt(u).toLowerCase() ) >= 0) && ( str.charAt(u) != ""))
? str.charAt( u++ )
: this.options.maskEmptyChr;
break;
default:
output += mask.charAt(i);
if( str.charAt(u) == mask.charAt(i) ){
u++;
}
break;
}
}
return output;
},
stripMask: function() {
var value = this.domNode.value;
if("" == value) return "";
var output = "";
if( this.isFixed() ) {
for(var i = 0, len = value.length; i < len; i++) {
if((value.charAt(i) != this.options.maskEmptyChr) && (this.isInputPosition(i)))
{output += value.charAt(i);}
}
} else if( this.isNumber() ) {
for(var i = 0, len = value.length; i < len; i++) {
if(this.options.validNumbers.indexOf(value.charAt(i)) >= 0)
{output += value.charAt(i);}
}
}
return output;
},
chrFromEv: function(ev) {
//Limita o tamanho de digitos
var chr = '', key = ev.which;
var xL = parseFloat($(this.node).attr("maxlength"));
if (xL!="NaN"){
if (this.domNode.value.length >= xL){
ev.preventDefault();
ev.stopPropagation();
ev.stopImmediatePropagation();
return chr;
}
}
if(key >= 96 && key <= 105){ key -= 48; } // shift number-pad numbers to corresponding character codes
chr = String.fromCharCode(key).toLowerCase(); // key pressed as a lowercase string
return chr;
},
formatNumber: function() {
// stripLeadingZeros
var olen = this.domNode.value.length
, str2 = this.stripMask()
, str1 = str2.replace( /^0+/, '' )
, range = new Range(this)
, neg = ""
, decsymb = this.options.decSymbol
, curpos = olen - range["1"];
//apaga sinal se houver
str1 = str1.replace('-', '');
//Impeder a exibição do sinal quando o valor for vázio
if (str1 == ""){
$(this.node).attr("n","");
}
//Configura sinal se houver
if ($(this.node).attr("n") == "-"){
neg = "-";
}
// wearLeadingZeros
str2 = str1;
str1 = "";
for(var len = str2.length, i = this.options.decDigits; len <= i; len++) {
str1 += "0";
}
str1 += str2;
// decimalSymbol
str2 = str1.substr(str1.length - this.options.decDigits);
str1 = str1.substring(0, (str1.length - this.options.decDigits));
//Verifica intervalo dos valores
var xValue = parseFloat(neg + str1 + this.options.decSymbol + str2);
var xMinValue = parseFloat($(this.node).attr("minValue"));
var xMaxValue = parseFloat($(this.node).attr("maxValue"));
if (xValue > xMaxValue ||
xValue < xMinValue){
$(this.node).addClass("-error");
}else{
$(this.node).removeClass("-error");
}
// groupSymbols
if (this.options.groupDigits != 0){
var re = new RegExp("(\\d+)(\\d{"+ this.options.groupDigits +"})");
while(re.test(str1)) {
str1 = str1.replace(re, "$1"+ this.options.groupSymbol +"$2");
}
}
if (this.options.decDigits == 0){
decsymb = "";
}
this.domNode.value = this.options.currencySymbol + neg + str1 + decsymb + str2;
// this.setSelection( range );
curpos = this.domNode.value.length - curpos;
//posiciona após o ponto decimal
this.setSelection(curpos, curpos);
},
getObjForm: function() {
return this.node.getClosest('form');
},
submitForm: function() {
var form = this.getObjForm();
form.trigger('submit');
}
};
function Range( obj ){
this.range = obj.getSelectionRange();
this.len = obj.domNode.value.length
this.obj = obj;
this['0'] = this.range[0];
this['1'] = this.range[1];
}
Range.prototype = {
valueOf : function(){
var len = this.len - this.obj.domNode.value.length;
return [ this.range[0] - len, this.range[1] - len ];
},
replaceWith : function( str ){
var val = this.obj.domNode.value
, range = this.valueOf();
return val.substr( 0, range[0] ) + str + val.substr( range[1] );
}
};
$.fn.dbsmask = function(options){
this.each(function(){
new dbsmask($(this), options);
});
};
})(jQuery); | src/main/resources/META-INF/resources/js/dbsmask.js | (function($){
var $chk = function(obj){
return !!(obj || obj === 0);
};
var dbsmask = function(){
this.initialize.apply(this, arguments);
};
dbsmask.prototype = {
options: {
maskEmptyChr : ' ',
validNumbers : "1234567890",
validAlphas : "abcdefghijklmnopqrstuvwxyz",
validAlphaNums : "abcdefghijklmnopqrstuvwxyz1234567890",
groupDigits : 3,
decDigits : 2,
currencySymbol : '',
groupSymbol : ',',
decSymbol : '.',
showMask : true,
stripMask : false,
lastFocus : 0,
oldValue : '',
number : {
stripMask : false,
showMask : false
}
},
initialize: function(node, options) {
this.node = node;
this.domNode = node[0];
this.options = $.extend({}, this.options, this.options[options.type] || {}, options);
var self = this;
this.node
// .bind( "mousedown click", function(ev){ ev.stopPropagation(); ev.preventDefault(); } )
.bind( "mouseup", function(){ self.onMouseUp .apply(self, arguments); } )
.bind( "keypress", function(){ self.onKeyPress.apply(self, arguments); } )
.bind( "keydown", function(){ self.onKeyDown .apply(self, arguments); } )
.bind( "focus", function(){ self.onFocus .apply(self, arguments); } )
.bind( "blur", function(){ self.onBlur .apply(self, arguments); } );
},
isFixed : function(){ return this.options.type == 'fixed'; },
isNumber : function(){ return this.options.type == 'number'; },
onMouseUp: function( ev ) {
ev.stopPropagation();
ev.preventDefault();
//
if( this.isFixed() ) {
var p = this.getSelectionStart();
this.setSelection(p, (p + 1));
} else if(this.isNumber() ) {
// this.setEnd();
}
},
onKeyDown: function(ev) {
if (ev.dbs){return;}
if (ev.altKey && ev.which == 18){ //Option + Delete
ev.preventDefault();
return;
}else if(ev.ctrlKey || ev.altKey || ev.metaKey) {
return;
} else if(ev.which == 13) { // enter
// this.node.blur(); Comentado em 6/ago/2013 para evitar salto do campo e o submit
// this.submitForm(this.node);
} else if(!(ev.which == 9)) { // se não tab
if(this.options.type == "fixed") {
ev.preventDefault();
var p = this.getSelectionStart();
switch(ev.which) {
case 8: // Backspace
this.updateSelection( this.options.maskEmptyChr );
this.selectPrevious();
break;
case 36: // Home
this.selectFirst();
break;
case 35: // End
this.selectLast();
break;
case 37: // Left
case 38: // Up
this.selectPrevious();
break;
case 39: // Right
case 40: // Down
this.selectNext();
break;
case 46: // Delete
this.updateSelection( this.options.maskEmptyChr );
this.selectNext();
break;
case 173: // MENOS
this.setSignal(ev);
break;
case 189: // MENOS
this.setSignal(ev);
break;
default:
var chr = this.chrFromEv(ev);
if( this.isViableInput( p, chr ) ) {
this.updateSelection( ev.shiftKey ? chr.toUpperCase() : chr );
this.node.trigger("valid", ev, this.node);
this.selectNext();
} else {
this.node.trigger("invalid", ev, this.node);
}
break;
}
} else if(this.options.type == "number") {
switch(ev.which) {
case 16: // END
case 35: // END
case 36: // HOME
case 37: // LEFT
case 38: // UP
case 39: // RIGHT
case 40: // DOWN
break;
case 8: // backspace
case 46: // delete
var xStart = this.getSelectionStart();
var xEnd = this.getSelectionEnd();
if (ev.which == 8){ //Se for backspace
//Se não houver seleção manual
if (xStart == xEnd){
//Se caracter anterior for pontuação, inclui seleção da pontuação para ser excluido também
var xChar = this.domNode.value.charAt(xStart - 1);
if (xChar == this.options.groupSymbol){
xStart-=2;
this.setSelection(xStart, xEnd);
}else if (xChar == this.options.decSymbol){
xStart-=1;
this.setSelection(xStart, xStart);
}
}
}
//Inclui zeros a direita se estiver na digitação das casas decimais
if (this.isInputDecimals(ev)){
var xZeros = "0";
for(var len = this.getSelectionStart() + 1, i = this.getSelectionEnd(); len < i; len++) {
xZeros += "0";
}
this.domNode.value += xZeros;
this.setSelection(xStart, xEnd);
}
var self = this;
setTimeout(function(){
self.formatNumber();
}, 1);
break;
case 173: // MENOS
this.setSignal(ev);
break;
case 189: // MENOS
this.setSignal(ev);
break;
case 188: // VIRGULA
this.moveToDecimalPosition(ev);
break;
case 190: // PONTO
this.moveToDecimalPosition(ev);
break;
default:
ev.preventDefault();
//Se campo inteiro estiver selecionado, apaga conteúdo e posiciona na parte inteira
if (this.domNode.value.length == (this.getSelectionEnd() - this.getSelectionStart())){
this.domNode.value = "";
this.formatNumber();
this.moveToIntegerPosition(ev);
}
if (this.isInputDecimals(ev)){
//Seleciona digito anterior para digitação caminha para a direita
var curpos = this.getSelectionStart();
if (curpos == this.getSelectionEnd()){
this.setSelection(curpos, curpos + 1);
}
};
var chr = this.chrFromEv( ev );
if( this.isViableInput( p, chr ) ) {
var range = new Range( this )
, val = this.sanityTest( range.replaceWith( chr ) );
if(val !== false){
this.updateSelection( chr );
this.formatNumber();
}
this.node.trigger( "valid", ev, this.node );
} else {
this.node.trigger( "invalid", ev, this.node );
}
// var xEvent = new MouseEvent('keydown', {
// 'view': window,
// 'bubbles': true,
// 'cancelable': true
// });
// $(this.node).dispatchEvent(xEvent);
// this.node.trigger( "keydown", ev, this.node );
// $(this.node).trigger("keydown.dbsmask");
// var xEvent = $.Event( "change",{dbs: true});
//Dispara evento depois de fechar
// $(this.node).trigger(xEvent);
break;
}
}
}
},
onKeyPress: function(ev) {
if (ev.altKey && ev.which == 18){ //Option + Delete
ev.preventDefault();
return;
}
var key = ev.which || ev.keyCode;
if(
!( this.allowKeys[ key ] )
&& !(ev.ctrlKey || ev.altKey || ev.metaKey)
) {
ev.preventDefault();
ev.stopPropagation();
}
},
allowKeys : {
8 : 1 // backspace
, 9 : 1 // tab
, 13 : 1 // enter
, 35 : 1 // end
, 36 : 1 // home
, 37 : 1 // left
, 38 : 1 // up
, 39 : 1 // right
, 40 : 1 // down
, 46 : 1 // delete
},
moveToDecimalPosition: function(ev){
ev.preventDefault();
if (this.options.decDigits > 0){
var curpos = this.domNode.value.indexOf(this.options.decSymbol) + 1;
this.setSelection(curpos, curpos);
}
},
moveToIntegerPosition: function(ev){
ev.preventDefault();
if (this.options.decDigits > 0){
var curpos = this.domNode.value.indexOf(this.options.decSymbol);
this.setSelection(curpos, curpos);
}
},
isInputDecimals: function(ev){
if (this.options.decDigits > 0){
var curpos = this.domNode.value.indexOf(this.options.decSymbol);
if (curpos < this.getSelectionStart()){
return true;
}else{
return false;
}
}
},
setSignal: function(ev){
ev.preventDefault();
ev.stopPropagation();
if ($(this.node).attr("minValue") < 0){
if ($(this.node).attr("n") == "-"){
$(this.node).attr("n","");
}else{
$(this.node).attr("n","-");
}
this.formatNumber();
}
},
onFocus: function(ev) {
//Salva valor atual para comparar com o novo
this.options.oldValue = this.domNode.value;
ev.stopPropagation();
ev.preventDefault();
this.options.showMask && (this.domNode.value = this.wearMask(this.domNode.value));
this.sanityTest( this.domNode.value );
var self = this;
setTimeout( function(){
self[ self.options.type === "fixed" ? 'selectFirst' : 'selectAll' ]();
}, 1 );
},
onBlur: function(ev) {
ev.stopPropagation();
ev.preventDefault();
if(this.options.stripMask){
this.domNode.value = this.stripMask();
}
//Dispara evento se valor foi alterado
if (this.options.oldValue != this.domNode.value){
$(this.node).trigger("change");
}
},
selectAll: function() {
this.setSelection(0, this.domNode.value.length);
},
selectFirst: function() {
for(var i = 0, len = this.options.mask.length; i < len; i++) {
if(this.isInputPosition(i)) {
this.setSelection(i, (i + 1));
return;
}
}
},
selectLast: function() {
for(var i = (this.options.mask.length - 1); i >= 0; i--) {
if(this.isInputPosition(i)) {
this.setSelection(i, (i + 1));
return;
}
}
},
selectPrevious: function(p) {
if( !$chk(p) ){ p = this.getSelectionStart(); }
if(p <= 0) {
this.selectFirst();
} else {
if(this.isInputPosition(p - 1)) {
this.setSelection(p - 1, p);
} else {
this.selectPrevious(p - 1);
}
}
},
selectNext: function(p) {
if( !$chk(p) ){ p = this.getSelectionEnd(); }
if( this.isNumber() ){
this.setSelection( p+1, p+1 );
return;
}
if( p >= this.options.mask.length) {
this.selectLast();
} else {
if(this.isInputPosition(p)) {
this.setSelection(p, (p + 1));
} else {
this.selectNext(p + 1);
}
}
},
setSelection: function( a, b ) {
a = a.valueOf();
if( !b && a.splice ){
b = a[1];
a = a[0];
}
if(this.domNode.setSelectionRange) {
this.domNode.focus();
this.domNode.setSelectionRange(a, b);
} else if(this.domNode.createTextRange) {
var r = this.domNode.createTextRange();
r.collapse();
r.moveStart("character", a);
r.moveEnd("character", (b - a));
r.select();
}
},
updateSelection: function( chr ) {
var value = this.domNode.value
, range = new Range( this )
, output = range.replaceWith( chr );
this.domNode.value = output;
if( range[0] === range[1] ){
this.setSelection( range[0] + 1, range[0] + 1 );
}else{
this.setSelection( range );
}
},
setEnd: function() {
var len = this.domNode.value.length - this.options.decDigits;
if (this.options.decDigits > 0){
len--;
}
this.setSelection(len, len);
},
getSelectionRange : function(){
return [ this.getSelectionStart(), this.getSelectionEnd() ];
},
getSelectionStart: function() {
var p = 0,
n = this.domNode.selectionStart;
if( n ) {
if( typeof( n ) == "number" ){
p = n;
}
} else if( document.selection ){
var r = document.selection.createRange().duplicate();
r.moveEnd( "character", this.domNode.value.length );
p = this.domNode.value.lastIndexOf( r.text );
if( r.text == "" ){
p = this.domNode.value.length;
}
}
return p;
},
getSelectionEnd: function() {
var p = 0,
n = this.domNode.selectionEnd;
if( n ) {
if( typeof( n ) == "number"){
p = n;
}
} else if( document.selection ){
var r = document.selection.createRange().duplicate();
r.moveStart( "character", -this.domNode.value.length );
p = r.text.length;
}
return p;
},
isInputPosition: function(p) {
var mask = this.options.mask.toLowerCase();
var chr = mask.charAt(p);
return !!~"9ax".indexOf(chr);
},
sanityTest: function( str, p ){
var sanity = this.options.sanity;
if(sanity instanceof RegExp){
return sanity.test(str);
}else if($.isFunction(sanity)){
var ret = sanity(str, p);
if(typeof(ret) == 'boolean'){
return ret;
}else if(typeof(ret) != 'undefined'){
if( this.isFixed() ){
var p = this.getSelectionStart();
this.domNode.value = this.wearMask( ret );
this.setSelection( p, p+1 );
this.selectNext();
}else if( this.isNumber() ){
var range = new Range( this );
this.domNode.value = ret;
this.setSelection( range );
this.formatNumber();
}
return false;
}
}
},
isViableInput: function() {
return this[ this.isFixed() ? 'isViableFixedInput' : 'isViableNumericInput' ].apply( this, arguments );
},
isViableFixedInput : function( p, chr ){
var mask = this.options.mask.toLowerCase();
var chMask = mask.charAt(p);
var val = this.domNode.value.split('');
val.splice( p, 1, chr );
val = val.join('');
var ret = this.sanityTest( val, p );
if(typeof(ret) == 'boolean'){ return ret; }
if(({
'9' : this.options.validNumbers,
'a' : this.options.validAlphas,
'x' : this.options.validAlphaNums
}[chMask] || '').indexOf(chr) >= 0){
return true;
}
return false;
},
isViableNumericInput : function( p, chr ){
return !!~this.options.validNumbers.indexOf( chr );
},
wearMask: function(str) {
var mask = this.options.mask.toLowerCase()
, output = ""
, chrSets = {
'9' : 'validNumbers'
, 'a' : 'validAlphas'
, 'x' : 'validAlphaNums'
};
for(var i = 0, u = 0, len = mask.length; i < len; i++) {
switch(mask.charAt(i)) {
case '9':
case 'a':
case 'x':
output +=
((this.options[ chrSets[ mask.charAt(i) ] ].indexOf( str.charAt(u).toLowerCase() ) >= 0) && ( str.charAt(u) != ""))
? str.charAt( u++ )
: this.options.maskEmptyChr;
break;
default:
output += mask.charAt(i);
if( str.charAt(u) == mask.charAt(i) ){
u++;
}
break;
}
}
return output;
},
stripMask: function() {
var value = this.domNode.value;
if("" == value) return "";
var output = "";
if( this.isFixed() ) {
for(var i = 0, len = value.length; i < len; i++) {
if((value.charAt(i) != this.options.maskEmptyChr) && (this.isInputPosition(i)))
{output += value.charAt(i);}
}
} else if( this.isNumber() ) {
for(var i = 0, len = value.length; i < len; i++) {
if(this.options.validNumbers.indexOf(value.charAt(i)) >= 0)
{output += value.charAt(i);}
}
}
return output;
},
chrFromEv: function(ev) {
//Limita o tamanho de digitos
var chr = '', key = ev.which;
var xL = parseFloat($(this.node).attr("maxlength"));
if (xL!="NaN"){
if (this.domNode.value.length >= xL){
ev.preventDefault();
ev.stopPropagation();
ev.stopImmediatePropagation();
return chr;
}
}
if(key >= 96 && key <= 105){ key -= 48; } // shift number-pad numbers to corresponding character codes
chr = String.fromCharCode(key).toLowerCase(); // key pressed as a lowercase string
return chr;
},
formatNumber: function() {
// stripLeadingZeros
var olen = this.domNode.value.length
, str2 = this.stripMask()
, str1 = str2.replace( /^0+/, '' )
, range = new Range(this)
, neg = ""
, decsymb = this.options.decSymbol
, curpos = olen - range["1"];
//apaga sinal se houver
str1 = str1.replace('-', '');
//Impeder a exibição do sinal quando o valor for vázio
if (str1 == ""){
$(this.node).attr("n","");
}
//Configura sinal se houver
if ($(this.node).attr("n") == "-"){
neg = "-";
}
// wearLeadingZeros
str2 = str1;
str1 = "";
for(var len = str2.length, i = this.options.decDigits; len <= i; len++) {
str1 += "0";
}
str1 += str2;
// decimalSymbol
str2 = str1.substr(str1.length - this.options.decDigits);
str1 = str1.substring(0, (str1.length - this.options.decDigits));
//Verifica intervalo dos valores
var xValue = parseFloat(neg + str1 + this.options.decSymbol + str2);
var xMinValue = parseFloat($(this.node).attr("minValue"));
var xMaxValue = parseFloat($(this.node).attr("maxValue"));
if (xValue > xMaxValue ||
xValue < xMinValue){
$(this.node).addClass("-error");
}else{
$(this.node).removeClass("-error");
}
// groupSymbols
if (this.options.groupDigits != 0){
var re = new RegExp("(\\d+)(\\d{"+ this.options.groupDigits +"})");
while(re.test(str1)) {
str1 = str1.replace(re, "$1"+ this.options.groupSymbol +"$2");
}
}
if (this.options.decDigits == 0){
decsymb = "";
}
this.domNode.value = this.options.currencySymbol + neg + str1 + decsymb + str2;
// this.setSelection( range );
curpos = this.domNode.value.length - curpos;
//posiciona após o ponto decimal
this.setSelection(curpos, curpos);
},
getObjForm: function() {
return this.node.getClosest('form');
},
submitForm: function() {
var form = this.getObjForm();
form.trigger('submit');
}
};
function Range( obj ){
this.range = obj.getSelectionRange();
this.len = obj.domNode.value.length
this.obj = obj;
this['0'] = this.range[0];
this['1'] = this.range[1];
}
Range.prototype = {
valueOf : function(){
var len = this.len - this.obj.domNode.value.length;
return [ this.range[0] - len, this.range[1] - len ];
},
replaceWith : function( str ){
var val = this.obj.domNode.value
, range = this.valueOf();
return val.substr( 0, range[0] ) + str + val.substr( range[1] );
}
};
$.fn.dbsmask = function(options){
this.each(function(){
new dbsmask($(this), options);
});
};
})(jQuery); | Documentação | src/main/resources/META-INF/resources/js/dbsmask.js | Documentação | <ide><path>rc/main/resources/META-INF/resources/js/dbsmask.js
<ide> } else {
<ide> this.node.trigger( "invalid", ev, this.node );
<ide> }
<del>// var xEvent = new MouseEvent('keydown', {
<del>// 'view': window,
<del>// 'bubbles': true,
<del>// 'cancelable': true
<del>// });
<del>// $(this.node).dispatchEvent(xEvent);
<del>
<del>// this.node.trigger( "keydown", ev, this.node );
<del>// $(this.node).trigger("keydown.dbsmask");
<del>
<del>// var xEvent = $.Event( "change",{dbs: true});
<del> //Dispara evento depois de fechar
<del>// $(this.node).trigger(xEvent);
<ide> break;
<ide>
<ide> } |
|
Java | mit | error: pathspec 'src/scheduler/EdfScheduler.java' did not match any file(s) known to git
| 4ebd490d71ec0e5a5e712a4ac1cbcd9e370629a8 | 1 | hadoth/OperatingSystems02 | package scheduler;
/**
* Created by Karol Pokomeda on 2017-04-24.
*/
public class EdfScheduler {
}
| src/scheduler/EdfScheduler.java | Stable version
| src/scheduler/EdfScheduler.java | Stable version | <ide><path>rc/scheduler/EdfScheduler.java
<add>package scheduler;
<add>
<add>/**
<add> * Created by Karol Pokomeda on 2017-04-24.
<add> */
<add>public class EdfScheduler {
<add>} |
|
Java | apache-2.0 | 59875ea9ee19899933d8e4f98de0f51966370fc3 | 0 | icecondor/android,icecondor/android | package com.icecondor.nest;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.Timer;
import java.util.TimerTask;
import net.oauth.OAuthAccessor;
import net.oauth.OAuthException;
import net.oauth.client.OAuthClient;
import net.oauth.client.httpclient4.HttpClient4;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.conn.HttpHostConnectException;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.util.EntityUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.app.AlertDialog;
import android.content.ComponentName;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.ServiceConnection;
import android.content.SharedPreferences;
import android.content.res.Resources;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.graphics.drawable.Drawable;
import android.location.Location;
import android.net.Uri;
import android.os.Bundle;
import android.os.IBinder;
import android.os.RemoteException;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.ViewGroup;
import android.view.Window;
import android.widget.EditText;
import android.widget.Toast;
import com.google.android.maps.GeoPoint;
import com.google.android.maps.MapActivity;
import com.google.android.maps.MapController;
import com.google.android.maps.MapView;
public class Radar extends MapActivity implements ServiceConnection,
Constants {
static final String appTag = "Radar";
MapController mapController;
PigeonService pigeon;
private Timer service_read_timer;
Intent settingsIntent, geoRssIntent;
SharedPreferences settings;
BirdOverlay nearbys;
FlockOverlay flock;
EditText uuid_field;
MapView mapView;
Drawable redMarker, greenMarker;
public void onCreate(Bundle savedInstanceState) {
Log.i(appTag, "onCreate");
super.onCreate(savedInstanceState);
settings = PreferenceManager.getDefaultSharedPreferences(this);
settingsIntent = new Intent(this, Settings.class);
geoRssIntent = new Intent(this, GeoRssList.class);
requestWindowFeature(Window.FEATURE_INDETERMINATE_PROGRESS);
setProgressBarIndeterminateVisibility(false);
setTitle(getString(R.string.app_name) + " v" + ICECONDOR_VERSION);
setContentView(R.layout.radar);
ViewGroup radar_zoom = (ViewGroup)findViewById(R.id.radar_mapview_zoom);
mapView = (MapView) findViewById(R.id.radar_mapview);
radar_zoom.addView(mapView.getZoomControls());
mapController = mapView.getController();
mapController.setZoom(15);
nearbys = new BirdOverlay();
mapView.getOverlays().add(nearbys);
Resources res = getResources();
redMarker = res.getDrawable(R.drawable.red_dot_12x20);
greenMarker = redMarker; // res.getDrawable(R.drawable.red_dot_12x20); // android bug?
flock = new FlockOverlay(redMarker, this);
mapView.getOverlays().add(flock);
}
public void scrollToLastFix() {
try {
if (pigeon != null) {
mapController = mapView.getController();
Location fix = pigeon.getLastFix();
Log.i(appTag, "pigeon says last fix is " + fix);
refreshBirdLocation();
if (fix != null) {
mapController.animateTo(new GeoPoint((int) (fix
.getLatitude() * 1000000), (int) (fix
.getLongitude() * 1000000)));
}
}
} catch (RemoteException e) {
Log.e(appTag, "error reading fix from pigeon.");
e.printStackTrace();
}
}
private void refreshBirdLocation() {
try {
if (pigeon!=null) {
nearbys.setLastLocalFix(pigeon.getLastFix());
nearbys.setLastPushedFix(pigeon.getLastPushedFix());
}
} catch (RemoteException e) {
nearbys.setLastLocalFix(null);
nearbys.setLastPushedFix(null);
}
}
@Override
public void onResume() {
super.onResume();
Log.i(appTag, "onResume yeah");
Intent pigeon_service = new Intent(this, Pigeon.class);
boolean result = bindService(pigeon_service, this, 0); // 0 = do not auto-start
Log.i(appTag, "pigeon bind result="+result);
startNeighborReadTimer();
}
@Override
public void onPause() {
super.onPause();
unbindService(this);
stopNeighborReadTimer();
Log.i(appTag, "onPause yeah");
}
@Override
protected boolean isRouteDisplayed() {
return false;
}
public boolean onCreateOptionsMenu(Menu menu) {
Log.i(appTag, "onCreateOptionsMenu");
boolean result = super.onCreateOptionsMenu(menu);
menu.add(Menu.NONE, 1, Menu.NONE, R.string.menu_last_fix).setIcon(android.R.drawable.ic_menu_mylocation);
menu.add(Menu.NONE, 2, Menu.NONE, R.string.menu_settings).setIcon(android.R.drawable.ic_menu_preferences);
menu.add(Menu.NONE, 3, Menu.NONE, R.string.menu_geo_rss).setIcon(R.drawable.bluerss);
menu.add(Menu.NONE, 4, Menu.NONE, pigeonStatusTitle()).setIcon(android.R.drawable.presence_invisible);
return result;
}
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(appTag, "menu:"+item.getItemId());
switch (item.getItemId()) {
case 1:
scrollToLastFix();
break;
case 2:
startActivity(settingsIntent);
break;
case 3:
startActivity(geoRssIntent);
break;
case 4:
togglePigeon();
item.setIcon(pigeonStatusIcon()).setTitle(pigeonStatusTitle());
break;
}
return false;
}
public boolean onPrepareOptionsMenu(Menu menu) {
boolean result = super.onPrepareOptionsMenu(menu);
menu.findItem(4).setIcon(pigeonStatusIcon()).setTitle(pigeonStatusTitle());
return result;
}
public boolean togglePigeon() {
try {
if (pigeon.isTransmitting()) {
pigeon.stopTransmitting();
return false;
} else {
if(!LocationRepositoriesSqlite.has_access_token(this)) {
// Alert the user that login is required
(new AlertDialog.Builder(this)).setMessage(
"Login to the location storage provider at "
+ ICECONDOR_URL_SHORTNAME
+ " to activate position recording.")
.setPositiveButton("Proceed",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog,
int whichButton) {
Log.i(appTag,"OAUTH request token retrieval");
Toast.makeText(Radar.this, "contacting server", Toast.LENGTH_SHORT).show();
// get the OAUTH request token
OAuthAccessor accessor = LocationRepositoriesSqlite
.defaultAccessor(Radar.this);
OAuthClient client = new OAuthClient(
new HttpClient4());
try {
client.getRequestToken(accessor);
String[] token_and_secret = new String[] {
accessor.requestToken,
accessor.tokenSecret };
Log.i(appTag, "request token: "
+ token_and_secret[0]
+ " secret:"
+ token_and_secret[1]);
LocationRepositoriesSqlite
.setDefaultRequestToken(
token_and_secret,
Radar.this);
Intent i = new Intent(
Intent.ACTION_VIEW);
String url = accessor.consumer.serviceProvider.userAuthorizationURL
+ "?oauth_token="
+ accessor.requestToken
+ "&oauth_callback="
+ accessor.consumer.callbackURL;
Log.i(appTag, "sending to "+url);
i.setData(Uri.parse(url));
startActivity(i);
} catch (IOException e) {
Toast.makeText(Radar.this, "server failed", Toast.LENGTH_SHORT).show();
e.printStackTrace();
} catch (OAuthException e) {
Toast.makeText(Radar.this, "server failed", Toast.LENGTH_SHORT).show();
e.printStackTrace();
} catch (URISyntaxException e) {
Toast.makeText(Radar.this, "server failed", Toast.LENGTH_SHORT).show();
e.printStackTrace();
}
}
}).setNegativeButton("Cancel",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog,
int whichButton) {
/* User clicked Cancel so do some stuff */
}
})
.show();
return false;
} else {
pigeon.startTransmitting();
return true;
}
}
} catch (RemoteException e) {
Log.e(appTag, "togglePigeon: pigeon communication error");
return false;
}
}
public int pigeonStatusIcon() {
try {
if(pigeon.isTransmitting()) {
return android.R.drawable.presence_online;
} else {
return android.R.drawable.presence_invisible;
}
} catch (RemoteException e) {
return android.R.drawable.presence_offline;
}
}
public int pigeonStatusTitle() {
try {
if(pigeon.isTransmitting()) {
return R.string.status_transmitting;
} else {
return R.string.status_not_transmitting;
}
} catch (RemoteException e) {
return R.string.status_error;
}
}
public void onServiceConnected(ComponentName className, IBinder service) {
Log.i(appTag, "onServiceConnected "+service);
pigeon = PigeonService.Stub.asInterface(service);
try {
Location fix = pigeon.getLastFix();
if (fix != null) {
scrollToLastFix();
} else {
Toast.makeText(this, "Waiting for first GPS fix", Toast.LENGTH_SHORT).show();
}
} catch (RemoteException e) {
}
}
public void onServiceDisconnected(ComponentName className) {
Log.i(appTag, "onServiceDisconnected "+className);
}
public void getNearbys() {
setProgressBarIndeterminateVisibility(true);
try {
HttpClient client = new DefaultHttpClient();
String url_with_params = ICECONDOR_READ_URL + "?id="
+ settings.getString(SETTING_OPENID, "");
Log.i(appTag, "GET " + url_with_params);
HttpGet get = new HttpGet(url_with_params);
get.getParams().setIntParameter("http.socket.timeout", 10000);
HttpResponse response;
response = client.execute(get);
Log.i(appTag, "http response: " + response.getStatusLine());
HttpEntity entity = response.getEntity();
String json = EntityUtils.toString(entity);
try {
JSONArray locations = new JSONArray(json);
Log.i(appTag, "parsed "+locations.length()+" locations");
for(int i=0; i < locations.length(); i++) {
JSONObject location = (JSONObject)locations.getJSONObject(i).get("location");
double longitude = location.getJSONObject("geom").getDouble("x");
double latitude = location.getJSONObject("geom").getDouble("y");
Log.i(appTag, "#"+i+" longititude: "+longitude+" latitude: "+latitude);
}
} catch (JSONException e) {
Log.i(appTag,"JSON exception: "+e);
}
} catch (ClientProtocolException e) {
Log.i(appTag, "client protocol exception " + e);
} catch (HttpHostConnectException e) {
Log.i(appTag, "connection failed "+e);
} catch (IOException e) {
Log.i(appTag, "IO exception "+e);
e.printStackTrace();
}
setProgressBarIndeterminateVisibility(false);
}
public void startNeighborReadTimer() {
service_read_timer = new Timer();
service_read_timer.scheduleAtFixedRate(new TimerTask() {
public void run() {
Log.i(appTag, "NeighborReadTimer fired");
//scrollToLastFix();
updateBirds();
//getNearbys();
}
}, 0, RADAR_REFRESH_INTERVAL);
}
protected void updateBirds() {
GeoRssSqlite rssdb = new GeoRssSqlite(this, "georss", null, 1);
SQLiteDatabase geoRssDb = rssdb.getReadableDatabase();
Cursor Urls = geoRssDb.query(GeoRssSqlite.SERVICES_TABLE, null, null, null, null, null, null);
while(Urls.moveToNext()) {
long url_id = Urls.getLong(Urls.getColumnIndex("_id"));
Log.i(appTag, "reading shouts db for #"+url_id+" "+Urls.getString(Urls.getColumnIndex("name")));
Cursor preshouts = geoRssDb.query(GeoRssSqlite.SHOUTS_TABLE, null, "service_id = ? and " +
"date <= ?",
new String[] {String.valueOf(url_id), Util.DateTimeIso8601(System.currentTimeMillis())},
null, null, "date desc", "1");
if(preshouts.getCount() > 0) {
preshouts.moveToFirst();
addBird(preshouts, redMarker);
}
preshouts.close();
Cursor postshouts = geoRssDb.query(GeoRssSqlite.SHOUTS_TABLE, null, "service_id = ? and " +
"date > ?",
new String[] {String.valueOf(url_id), Util.DateTimeIso8601(System.currentTimeMillis())},
null, null, "date asc", "1");
if (postshouts.getCount() > 0) {
postshouts.moveToFirst();
addBird(postshouts, greenMarker);
}
postshouts.close();
}
Urls.close();
geoRssDb.close();
rssdb.close();
}
private void addBird(Cursor displayShout, Drawable marker) {
String guid = displayShout.getString(displayShout.getColumnIndex("guid"));
if (!flock.contains(guid)) {
GeoPoint point = new GeoPoint((int) (displayShout
.getFloat(displayShout.getColumnIndex("lat")) * 1000000),
(int) (displayShout.getFloat(displayShout
.getColumnIndex("long")) * 1000000));
BirdItem bird = new BirdItem(point, guid, displayShout
.getString(displayShout.getColumnIndex("title")) + " " +
displayShout.getString(displayShout.getColumnIndex("date")));
bird.setMarker(marker);
flock.add(bird);
}
}
public void stopNeighborReadTimer() {
service_read_timer.cancel();
}
} | src/com/icecondor/nest/Radar.java | package com.icecondor.nest;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.Timer;
import java.util.TimerTask;
import net.oauth.OAuthAccessor;
import net.oauth.OAuthException;
import net.oauth.client.OAuthClient;
import net.oauth.client.httpclient4.HttpClient4;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.conn.HttpHostConnectException;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.util.EntityUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.app.AlertDialog;
import android.content.ComponentName;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.ServiceConnection;
import android.content.SharedPreferences;
import android.content.res.Resources;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.graphics.drawable.Drawable;
import android.location.Location;
import android.net.Uri;
import android.os.Bundle;
import android.os.IBinder;
import android.os.RemoteException;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.ViewGroup;
import android.view.Window;
import android.widget.EditText;
import android.widget.Toast;
import com.google.android.maps.GeoPoint;
import com.google.android.maps.MapActivity;
import com.google.android.maps.MapController;
import com.google.android.maps.MapView;
public class Radar extends MapActivity implements ServiceConnection,
Constants {
static final String appTag = "Radar";
MapController mapController;
PigeonService pigeon;
private Timer service_read_timer;
Intent settingsIntent, geoRssIntent;
SharedPreferences settings;
BirdOverlay nearbys;
FlockOverlay flock;
EditText uuid_field;
MapView mapView;
Drawable redMarker, greenMarker;
public void onCreate(Bundle savedInstanceState) {
Log.i(appTag, "onCreate");
super.onCreate(savedInstanceState);
settings = PreferenceManager.getDefaultSharedPreferences(this);
settingsIntent = new Intent(this, Settings.class);
geoRssIntent = new Intent(this, GeoRssList.class);
requestWindowFeature(Window.FEATURE_INDETERMINATE_PROGRESS);
setProgressBarIndeterminateVisibility(false);
setTitle(getString(R.string.app_name) + " v" + ICECONDOR_VERSION);
setContentView(R.layout.radar);
ViewGroup radar_zoom = (ViewGroup)findViewById(R.id.radar_mapview_zoom);
mapView = (MapView) findViewById(R.id.radar_mapview);
radar_zoom.addView(mapView.getZoomControls());
mapController = mapView.getController();
mapController.setZoom(15);
nearbys = new BirdOverlay();
mapView.getOverlays().add(nearbys);
Resources res = getResources();
redMarker = res.getDrawable(R.drawable.red_dot_12x20);
greenMarker = redMarker; // res.getDrawable(R.drawable.red_dot_12x20); // android bug?
flock = new FlockOverlay(redMarker, this);
mapView.getOverlays().add(flock);
}
public void scrollToLastFix() {
try {
if (pigeon != null) {
mapController = mapView.getController();
Location fix = pigeon.getLastFix();
Log.i(appTag, "pigeon says last fix is " + fix);
refreshBirdLocation();
if (fix != null) {
mapController.animateTo(new GeoPoint((int) (fix
.getLatitude() * 1000000), (int) (fix
.getLongitude() * 1000000)));
}
}
} catch (RemoteException e) {
Log.e(appTag, "error reading fix from pigeon.");
e.printStackTrace();
}
}
private void refreshBirdLocation() {
try {
if (pigeon!=null) {
nearbys.setLastLocalFix(pigeon.getLastFix());
nearbys.setLastPushedFix(pigeon.getLastPushedFix());
}
} catch (RemoteException e) {
nearbys.setLastLocalFix(null);
nearbys.setLastPushedFix(null);
}
}
@Override
public void onResume() {
super.onResume();
Log.i(appTag, "onResume yeah");
Intent pigeon_service = new Intent(this, Pigeon.class);
boolean result = bindService(pigeon_service, this, 0); // 0 = do not auto-start
Log.i(appTag, "pigeon bind result="+result);
startNeighborReadTimer();
}
@Override
public void onPause() {
super.onPause();
unbindService(this);
stopNeighborReadTimer();
Log.i(appTag, "onPause yeah");
}
@Override
protected boolean isRouteDisplayed() {
return false;
}
public boolean onCreateOptionsMenu(Menu menu) {
Log.i(appTag, "onCreateOptionsMenu");
boolean result = super.onCreateOptionsMenu(menu);
menu.add(Menu.NONE, 1, Menu.NONE, R.string.menu_last_fix).setIcon(android.R.drawable.ic_menu_mylocation);
menu.add(Menu.NONE, 2, Menu.NONE, R.string.menu_settings).setIcon(android.R.drawable.ic_menu_preferences);
menu.add(Menu.NONE, 3, Menu.NONE, R.string.menu_geo_rss).setIcon(R.drawable.bluerss);
menu.add(Menu.NONE, 4, Menu.NONE, pigeonStatusTitle()).setIcon(android.R.drawable.presence_invisible);
return result;
}
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(appTag, "menu:"+item.getItemId());
switch (item.getItemId()) {
case 1:
scrollToLastFix();
break;
case 2:
startActivity(settingsIntent);
break;
case 3:
startActivity(geoRssIntent);
break;
case 4:
togglePigeon();
item.setIcon(pigeonStatusIcon()).setTitle(pigeonStatusTitle());
break;
}
return false;
}
public boolean onPrepareOptionsMenu(Menu menu) {
boolean result = super.onPrepareOptionsMenu(menu);
menu.findItem(4).setIcon(pigeonStatusIcon()).setTitle(pigeonStatusTitle());
return result;
}
public boolean togglePigeon() {
try {
if (pigeon.isTransmitting()) {
pigeon.stopTransmitting();
return false;
} else {
if(!LocationRepositoriesSqlite.has_access_token(this)) {
// Alert the user that login is required
(new AlertDialog.Builder(this)).setMessage(
"Login to the location storage provider at "
+ ICECONDOR_URL_SHORTNAME
+ " to activate position recording.")
.setPositiveButton("Proceed",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog,
int whichButton) {
Log.i(appTag,"OAUTH request token retrieval");
Toast.makeText(Radar.this, "contacting server", Toast.LENGTH_SHORT).show();
// get the OAUTH request token
OAuthAccessor accessor = LocationRepositoriesSqlite
.defaultAccessor(Radar.this);
OAuthClient client = new OAuthClient(
new HttpClient4());
try {
client.getRequestToken(accessor);
String[] token_and_secret = new String[] {
accessor.requestToken,
accessor.tokenSecret };
Log.i(appTag, "request token: "
+ token_and_secret[0]
+ " secret:"
+ token_and_secret[1]);
LocationRepositoriesSqlite
.setDefaultRequestToken(
token_and_secret,
Radar.this);
Intent i = new Intent(
Intent.ACTION_VIEW);
String url = accessor.consumer.serviceProvider.userAuthorizationURL
+ "?oauth_token="
+ accessor.requestToken
+ "&oauth_callback="
+ accessor.consumer.callbackURL;
Log.i(appTag, "sending to "+url);
i.setData(Uri.parse(url));
startActivity(i);
} catch (IOException e) {
Toast.makeText(Radar.this, "server failed", Toast.LENGTH_SHORT).show();
e.printStackTrace();
} catch (OAuthException e) {
Toast.makeText(Radar.this, "server failed", Toast.LENGTH_SHORT).show();
e.printStackTrace();
} catch (URISyntaxException e) {
Toast.makeText(Radar.this, "server failed", Toast.LENGTH_SHORT).show();
e.printStackTrace();
}
}
}).setNegativeButton("Cancel",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog,
int whichButton) {
/* User clicked Cancel so do some stuff */
}
})
.show();
return false;
} else {
pigeon.startTransmitting();
return true;
}
}
} catch (RemoteException e) {
Log.e(appTag, "togglePigeon: pigeon communication error");
return false;
}
}
public int pigeonStatusIcon() {
try {
if(pigeon.isTransmitting()) {
return android.R.drawable.presence_online;
} else {
return android.R.drawable.presence_invisible;
}
} catch (RemoteException e) {
return android.R.drawable.presence_offline;
}
}
public int pigeonStatusTitle() {
try {
if(pigeon.isTransmitting()) {
return R.string.status_transmitting;
} else {
return R.string.status_not_transmitting;
}
} catch (RemoteException e) {
return R.string.status_error;
}
}
public void onServiceConnected(ComponentName className, IBinder service) {
Log.i(appTag, "onServiceConnected "+service);
pigeon = PigeonService.Stub.asInterface(service);
scrollToLastFix();
try {
Location fix = pigeon.getLastFix();
if (fix != null) {
scrollToLastFix();
} else {
Toast.makeText(this, "Waiting for first GPS fix", Toast.LENGTH_SHORT).show();
}
} catch (RemoteException e) {
}
}
public void onServiceDisconnected(ComponentName className) {
Log.i(appTag, "onServiceDisconnected "+className);
}
public void getNearbys() {
setProgressBarIndeterminateVisibility(true);
try {
HttpClient client = new DefaultHttpClient();
String url_with_params = ICECONDOR_READ_URL + "?id="
+ settings.getString(SETTING_OPENID, "");
Log.i(appTag, "GET " + url_with_params);
HttpGet get = new HttpGet(url_with_params);
get.getParams().setIntParameter("http.socket.timeout", 10000);
HttpResponse response;
response = client.execute(get);
Log.i(appTag, "http response: " + response.getStatusLine());
HttpEntity entity = response.getEntity();
String json = EntityUtils.toString(entity);
try {
JSONArray locations = new JSONArray(json);
Log.i(appTag, "parsed "+locations.length()+" locations");
for(int i=0; i < locations.length(); i++) {
JSONObject location = (JSONObject)locations.getJSONObject(i).get("location");
double longitude = location.getJSONObject("geom").getDouble("x");
double latitude = location.getJSONObject("geom").getDouble("y");
Log.i(appTag, "#"+i+" longititude: "+longitude+" latitude: "+latitude);
}
} catch (JSONException e) {
Log.i(appTag,"JSON exception: "+e);
}
} catch (ClientProtocolException e) {
Log.i(appTag, "client protocol exception " + e);
} catch (HttpHostConnectException e) {
Log.i(appTag, "connection failed "+e);
} catch (IOException e) {
Log.i(appTag, "IO exception "+e);
e.printStackTrace();
}
setProgressBarIndeterminateVisibility(false);
}
public void startNeighborReadTimer() {
service_read_timer = new Timer();
service_read_timer.scheduleAtFixedRate(new TimerTask() {
public void run() {
Log.i(appTag, "NeighborReadTimer fired");
//scrollToLastFix();
updateBirds();
//getNearbys();
}
}, 0, RADAR_REFRESH_INTERVAL);
}
protected void updateBirds() {
GeoRssSqlite rssdb = new GeoRssSqlite(this, "georss", null, 1);
SQLiteDatabase geoRssDb = rssdb.getReadableDatabase();
Cursor Urls = geoRssDb.query(GeoRssSqlite.SERVICES_TABLE, null, null, null, null, null, null);
while(Urls.moveToNext()) {
long url_id = Urls.getLong(Urls.getColumnIndex("_id"));
Log.i(appTag, "reading shouts db for #"+url_id+" "+Urls.getString(Urls.getColumnIndex("name")));
Cursor preshouts = geoRssDb.query(GeoRssSqlite.SHOUTS_TABLE, null, "service_id = ? and " +
"date <= ?",
new String[] {String.valueOf(url_id), Util.DateTimeIso8601(System.currentTimeMillis())},
null, null, "date desc", "1");
if(preshouts.getCount() > 0) {
preshouts.moveToFirst();
addBird(preshouts, redMarker);
}
preshouts.close();
Cursor postshouts = geoRssDb.query(GeoRssSqlite.SHOUTS_TABLE, null, "service_id = ? and " +
"date > ?",
new String[] {String.valueOf(url_id), Util.DateTimeIso8601(System.currentTimeMillis())},
null, null, "date asc", "1");
if (postshouts.getCount() > 0) {
postshouts.moveToFirst();
addBird(postshouts, greenMarker);
}
postshouts.close();
}
Urls.close();
geoRssDb.close();
rssdb.close();
}
private void addBird(Cursor displayShout, Drawable marker) {
String guid = displayShout.getString(displayShout.getColumnIndex("guid"));
if (!flock.contains(guid)) {
GeoPoint point = new GeoPoint((int) (displayShout
.getFloat(displayShout.getColumnIndex("lat")) * 1000000),
(int) (displayShout.getFloat(displayShout
.getColumnIndex("long")) * 1000000));
BirdItem bird = new BirdItem(point, guid, displayShout
.getString(displayShout.getColumnIndex("title")) + " " +
displayShout.getString(displayShout.getColumnIndex("date")));
bird.setMarker(marker);
flock.add(bird);
}
}
public void stopNeighborReadTimer() {
service_read_timer.cancel();
}
} | cleanup
| src/com/icecondor/nest/Radar.java | cleanup | <ide><path>rc/com/icecondor/nest/Radar.java
<ide> public void onServiceConnected(ComponentName className, IBinder service) {
<ide> Log.i(appTag, "onServiceConnected "+service);
<ide> pigeon = PigeonService.Stub.asInterface(service);
<del> scrollToLastFix();
<ide> try {
<ide> Location fix = pigeon.getLastFix();
<ide> if (fix != null) { |
|
Java | mit | c6437e0226c096a727ad92a41268b53380bbe59c | 0 | CS2103JAN2017-F12-B2/main,CS2103JAN2017-F12-B2/main | package seedu.address.logic.parser;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
/**
* Tokenizes arguments string of the form: {@code preamble <prefix>value <prefix>value ...}<br>
* e.g. {@code some preamble text /t 11.00/dToday /t 12.00 /k /m July} where prefixes are {@code /t /d /k /m}.<br>
* 1. An argument's value can be an empty string e.g. the value of {@code /k} in the above example.<br>
* 2. Leading and trailing whitespaces of an argument value will be discarded.<br>
* 3. A prefix need not have leading and trailing spaces e.g. the {@code /d in 11.00/dToday} in the above example<br>
* 4. An argument may be repeated and all its values will be accumulated e.g. the value of {@code /t}
* in the above example.<br>
*/
public class ArgumentTokenizer {
/** Given prefixes **/
private final List<Prefix> prefixes;
/** Arguments found after tokenizing **/
private final Map<Prefix, List<String>> tokenizedArguments = new HashMap<>();
/**
* Creates an ArgumentTokenizer that can tokenize arguments string as described by prefixes
*/
public ArgumentTokenizer(Prefix... prefixes) {
this.prefixes = Arrays.asList(prefixes);
}
/**
* @param argsString arguments string of the form: preamble <prefix>value <prefix>value ...
*/
public void tokenize(String argsString) {
resetTokenizerState();
List<PrefixPosition> positions = findAllPrefixPositions(argsString);
extractArguments(argsString, positions);
}
/**
* Returns last value of given prefix.
*/
public Optional<String> getValue(Prefix prefix) {
return getAllValues(prefix).flatMap((values) -> Optional.of(values.get(values.size() - 1)));
}
/**
* Returns all values of given prefix.
*/
public Optional<List<String>> getAllValues(Prefix prefix) {
if (!this.tokenizedArguments.containsKey(prefix)) {
return Optional.empty();
}
List<String> values = new ArrayList<>(this.tokenizedArguments.get(prefix));
return Optional.of(values);
}
/**
* Returns the preamble (text before the first valid prefix), if any. Leading/trailing spaces will be trimmed.
* If the string before the first prefix is empty, Optional.empty() will be returned.
*/
public Optional<String> getPreamble() {
Optional<String> storedPreamble = getValue(new Prefix(""));
/* An empty preamble is considered 'no preamble present' */
if (storedPreamble.isPresent() && !storedPreamble.get().isEmpty()) {
return storedPreamble;
} else {
return Optional.empty();
}
}
private void resetTokenizerState() {
this.tokenizedArguments.clear();
}
/**
* Finds all positions in an arguments string at which any prefix appears
*/
private List<PrefixPosition> findAllPrefixPositions(String argsString) {
List<PrefixPosition> positions = new ArrayList<>();
for (Prefix prefix : this.prefixes) {
positions.addAll(findPrefixPositions(argsString, prefix));
}
return positions;
}
/**
* Finds all positions in an arguments string at which a given {@code prefix} appears
*/
private List<PrefixPosition> findPrefixPositions(String argsString, Prefix prefix) {
List<PrefixPosition> positions = new ArrayList<>();
int argumentStart = argsString.indexOf(prefix.getPrefix());
while (argumentStart != -1) {
PrefixPosition extendedPrefix = new PrefixPosition(prefix, argumentStart);
positions.add(extendedPrefix);
argumentStart = argsString.indexOf(prefix.getPrefix(), argumentStart + 1);
}
return positions;
}
/**
* Extracts the preamble/arguments and stores them in local variables.
* @param prefixPositions must contain all prefixes in the {@code argsString}
*/
private void extractArguments(String argsString, List<PrefixPosition> prefixPositions) {
// Sort by start position
prefixPositions.sort((prefix1, prefix2) -> prefix1.getStartPosition() - prefix2.getStartPosition());
// Insert a PrefixPosition to represent the preamble
PrefixPosition preambleMarker = new PrefixPosition(new Prefix(""), 0);
prefixPositions.add(0, preambleMarker);
// Add a dummy PrefixPosition to represent the end of the string
PrefixPosition endPositionMarker = new PrefixPosition(new Prefix(""), argsString.length());
prefixPositions.add(endPositionMarker);
// Extract the prefixed arguments and preamble (if any)
for (int i = 0; i < prefixPositions.size() - 1; i++) {
String argValue = extractArgumentValue(argsString, prefixPositions.get(i), prefixPositions.get(i + 1));
saveArgument(prefixPositions.get(i).getPrefix(), argValue);
}
}
/**
* Returns the trimmed value of the argument specified by {@code currentPrefixPosition}.
* The end position of the value is determined by {@code nextPrefixPosition}
*/
private String extractArgumentValue(String argsString,
PrefixPosition currentPrefixPosition,
PrefixPosition nextPrefixPosition) {
Prefix prefix = currentPrefixPosition.getPrefix();
int valueStartPos = currentPrefixPosition.getStartPosition() + prefix.getPrefix().length();
String value = argsString.substring(valueStartPos, nextPrefixPosition.getStartPosition());
        if (prefix.getPrefix().equals("d/")) {
// Parse date
value = parseNLPDate(value);
}
return value.trim();
}
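    // Placeholder for natural-language parsing of deadline ("d/") text -- this commit only adds
    // the stub, which returns a fixed marker value instead of an interpreted date.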
private String parseNLPDate(String argsString) {
return "Stub";
}
/**
* Stores the value of the given prefix in the state of this tokenizer
*/
private void saveArgument(Prefix prefix, String value) {
if (this.tokenizedArguments.containsKey(prefix)) {
this.tokenizedArguments.get(prefix).add(value);
return;
}
List<String> values = new ArrayList<>();
values.add(value);
this.tokenizedArguments.put(prefix, values);
}
/**
* A prefix that marks the beginning of an argument.
* e.g. '/t' in 'add James /t friend'
*/
public static class Prefix {
final String prefix;
Prefix(String prefix) {
this.prefix = prefix;
}
String getPrefix() {
return this.prefix;
}
@Override
public int hashCode() {
return this.prefix == null ? 0 : this.prefix.hashCode();
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof Prefix)) {
return false;
}
if (obj == this) {
return true;
}
Prefix otherPrefix = (Prefix) obj;
return otherPrefix.getPrefix().equals(getPrefix());
}
}
/**
* Represents a prefix's position in an arguments string
*/
private class PrefixPosition {
private int startPosition;
private final Prefix prefix;
PrefixPosition(Prefix prefix, int startPosition) {
this.prefix = prefix;
this.startPosition = startPosition;
}
int getStartPosition() {
return this.startPosition;
}
Prefix getPrefix() {
return this.prefix;
}
}
}
| src/main/java/seedu/address/logic/parser/ArgumentTokenizer.java | package seedu.address.logic.parser;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
/**
* Tokenizes arguments string of the form: {@code preamble <prefix>value <prefix>value ...}<br>
* e.g. {@code some preamble text /t 11.00/dToday /t 12.00 /k /m July} where prefixes are {@code /t /d /k /m}.<br>
* 1. An argument's value can be an empty string e.g. the value of {@code /k} in the above example.<br>
* 2. Leading and trailing whitespaces of an argument value will be discarded.<br>
 * 3. A prefix need not have leading and trailing spaces e.g. the {@code /d} in {@code 11.00/dToday} in the above example.<br>
* 4. An argument may be repeated and all its values will be accumulated e.g. the value of {@code /t}
* in the above example.<br>
*/
public class ArgumentTokenizer {
/** Given prefixes **/
private final List<Prefix> prefixes;
/** Arguments found after tokenizing **/
private final Map<Prefix, List<String>> tokenizedArguments = new HashMap<>();
/**
* Creates an ArgumentTokenizer that can tokenize arguments string as described by prefixes
*/
public ArgumentTokenizer(Prefix... prefixes) {
this.prefixes = Arrays.asList(prefixes);
}
/**
* @param argsString arguments string of the form: preamble <prefix>value <prefix>value ...
*/
public void tokenize(String argsString) {
resetTokenizerState();
List<PrefixPosition> positions = findAllPrefixPositions(argsString);
extractArguments(argsString, positions);
}
/**
* Returns last value of given prefix.
*/
public Optional<String> getValue(Prefix prefix) {
return getAllValues(prefix).flatMap((values) -> Optional.of(values.get(values.size() - 1)));
}
/**
* Returns all values of given prefix.
*/
public Optional<List<String>> getAllValues(Prefix prefix) {
if (!this.tokenizedArguments.containsKey(prefix)) {
return Optional.empty();
}
List<String> values = new ArrayList<>(this.tokenizedArguments.get(prefix));
return Optional.of(values);
}
/**
* Returns the preamble (text before the first valid prefix), if any. Leading/trailing spaces will be trimmed.
* If the string before the first prefix is empty, Optional.empty() will be returned.
*/
public Optional<String> getPreamble() {
Optional<String> storedPreamble = getValue(new Prefix(""));
/* An empty preamble is considered 'no preamble present' */
if (storedPreamble.isPresent() && !storedPreamble.get().isEmpty()) {
return storedPreamble;
} else {
return Optional.empty();
}
}
private void resetTokenizerState() {
this.tokenizedArguments.clear();
}
/**
* Finds all positions in an arguments string at which any prefix appears
*/
private List<PrefixPosition> findAllPrefixPositions(String argsString) {
List<PrefixPosition> positions = new ArrayList<>();
for (Prefix prefix : this.prefixes) {
positions.addAll(findPrefixPositions(argsString, prefix));
}
return positions;
}
/**
* Finds all positions in an arguments string at which a given {@code prefix} appears
*/
private List<PrefixPosition> findPrefixPositions(String argsString, Prefix prefix) {
List<PrefixPosition> positions = new ArrayList<>();
int argumentStart = argsString.indexOf(prefix.getPrefix());
while (argumentStart != -1) {
PrefixPosition extendedPrefix = new PrefixPosition(prefix, argumentStart);
positions.add(extendedPrefix);
argumentStart = argsString.indexOf(prefix.getPrefix(), argumentStart + 1);
}
return positions;
}
/**
* Extracts the preamble/arguments and stores them in local variables.
* @param prefixPositions must contain all prefixes in the {@code argsString}
*/
private void extractArguments(String argsString, List<PrefixPosition> prefixPositions) {
// Sort by start position
prefixPositions.sort((prefix1, prefix2) -> prefix1.getStartPosition() - prefix2.getStartPosition());
// Insert a PrefixPosition to represent the preamble
PrefixPosition preambleMarker = new PrefixPosition(new Prefix(""), 0);
prefixPositions.add(0, preambleMarker);
// Add a dummy PrefixPosition to represent the end of the string
PrefixPosition endPositionMarker = new PrefixPosition(new Prefix(""), argsString.length());
prefixPositions.add(endPositionMarker);
// Extract the prefixed arguments and preamble (if any)
for (int i = 0; i < prefixPositions.size() - 1; i++) {
String argValue = extractArgumentValue(argsString, prefixPositions.get(i), prefixPositions.get(i + 1));
saveArgument(prefixPositions.get(i).getPrefix(), argValue);
}
}
/**
* Returns the trimmed value of the argument specified by {@code currentPrefixPosition}.
* The end position of the value is determined by {@code nextPrefixPosition}
*/
private String extractArgumentValue(String argsString,
PrefixPosition currentPrefixPosition,
PrefixPosition nextPrefixPosition) {
Prefix prefix = currentPrefixPosition.getPrefix();
int valueStartPos = currentPrefixPosition.getStartPosition() + prefix.getPrefix().length();
String value = argsString.substring(valueStartPos, nextPrefixPosition.getStartPosition());
return value.trim();
}
/**
* Stores the value of the given prefix in the state of this tokenizer
*/
private void saveArgument(Prefix prefix, String value) {
if (this.tokenizedArguments.containsKey(prefix)) {
this.tokenizedArguments.get(prefix).add(value);
return;
}
List<String> values = new ArrayList<>();
values.add(value);
this.tokenizedArguments.put(prefix, values);
}
/**
* A prefix that marks the beginning of an argument.
* e.g. '/t' in 'add James /t friend'
*/
public static class Prefix {
final String prefix;
Prefix(String prefix) {
this.prefix = prefix;
}
String getPrefix() {
return this.prefix;
}
@Override
public int hashCode() {
return this.prefix == null ? 0 : this.prefix.hashCode();
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof Prefix)) {
return false;
}
if (obj == this) {
return true;
}
Prefix otherPrefix = (Prefix) obj;
return otherPrefix.getPrefix().equals(getPrefix());
}
}
/**
* Represents a prefix's position in an arguments string
*/
private class PrefixPosition {
private int startPosition;
private final Prefix prefix;
PrefixPosition(Prefix prefix, int startPosition) {
this.prefix = prefix;
this.startPosition = startPosition;
}
int getStartPosition() {
return this.startPosition;
}
Prefix getPrefix() {
return this.prefix;
}
}
}
| Added stub to detect deadline text
| src/main/java/seedu/address/logic/parser/ArgumentTokenizer.java | Added stub to detect deadline text | <ide><path>rc/main/java/seedu/address/logic/parser/ArgumentTokenizer.java
<ide>
<ide> int valueStartPos = currentPrefixPosition.getStartPosition() + prefix.getPrefix().length();
<ide> String value = argsString.substring(valueStartPos, nextPrefixPosition.getStartPosition());
<del>
<add>         if (prefix.getPrefix().equals("d/")) {
<add> // Parse date
<add> value = parseNLPDate(value);
<add> }
<ide> return value.trim();
<add> }
<add>
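<add>     // Placeholder for natural-language parsing of deadline ("d/") text -- this commit only adds
<add>     // the stub, which returns a fixed marker value instead of an interpreted date.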
<add> private String parseNLPDate(String argsString) {
<add> return "Stub";
<ide> }
<ide>
<ide> /** |
|
JavaScript | apache-2.0 | edb2f3da26fb4ae725db4a13c026579dd01d1e17 | 0 | wido/cloudstack,jcshen007/cloudstack,mufaddalq/cloudstack-datera-driver,mufaddalq/cloudstack-datera-driver,GabrielBrascher/cloudstack,jcshen007/cloudstack,wido/cloudstack,mufaddalq/cloudstack-datera-driver,jcshen007/cloudstack,resmo/cloudstack,jcshen007/cloudstack,GabrielBrascher/cloudstack,DaanHoogland/cloudstack,GabrielBrascher/cloudstack,DaanHoogland/cloudstack,resmo/cloudstack,DaanHoogland/cloudstack,wido/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,mufaddalq/cloudstack-datera-driver,wido/cloudstack,DaanHoogland/cloudstack,wido/cloudstack,wido/cloudstack,DaanHoogland/cloudstack,DaanHoogland/cloudstack,GabrielBrascher/cloudstack,resmo/cloudstack,wido/cloudstack,jcshen007/cloudstack,resmo/cloudstack,GabrielBrascher/cloudstack,jcshen007/cloudstack,DaanHoogland/cloudstack,jcshen007/cloudstack,resmo/cloudstack,resmo/cloudstack,mufaddalq/cloudstack-datera-driver,resmo/cloudstack,mufaddalq/cloudstack-datera-driver | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
(function($, cloudStack) {
var aclMultiEdit = {
noSelect: true,
fieldPreFilter: function(args) {
var context = args.context;
var hiddenFields = [];
if (context.networks) { // from tier detail view
hiddenFields.push('networkid');
}
return hiddenFields; // Returns fields to be hidden
},
fields: {
'cidrlist': { edit: true, label: 'label.cidr' },
'protocol': {
label: 'label.protocol',
select: function(args) {
args.$select.change(function() {
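            // Toggle which inputs are usable for the selected protocol: ICMP exposes the
            // icmptype/icmpcode fields, 'protocolnumber' reveals the protocol-number column,
            // and any other protocol falls back to the start/end port fields.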
var $inputs = args.$form.find('input');
var $icmpFields = $inputs.filter(function() {
var name = $(this).attr('name');
return $.inArray(name, [
'icmptype',
'icmpcode'
]) > -1;
});
var $otherFields = $inputs.filter(function() {
var name = $(this).attr('name');
return name != 'icmptype' && name != 'icmpcode' && name != 'cidrlist';
});
var $protocolinput = args.$form.find('th,td');
var $protocolFields = $protocolinput.filter(function(){
var name = $(this).attr('rel');
return $.inArray(name,['protocolnumber']) > -1;
});
            if ($(this).val() == 'protocolnumber') {
              $protocolFields.show();
            } else {
              $protocolFields.hide();
            }
if ($(this).val() == 'icmp') {
$icmpFields.show();
$icmpFields.attr('disabled', false);
$otherFields.attr('disabled', 'disabled');
$otherFields.hide();
$otherFields.parent().find('label.error').hide();
} else {
$otherFields.show();
$otherFields.parent().find('label.error').hide();
$otherFields.attr('disabled', false);
$icmpFields.attr('disabled', 'disabled');
$icmpFields.hide();
$icmpFields.parent().find('label.error').hide();
}
});
args.response.success({
data: [
{ name: 'tcp', description: 'TCP' },
{ name: 'udp', description: 'UDP' },
{ name: 'icmp', description: 'ICMP' },
{ name: 'all', description: 'ALL'},
{ name: 'protocolnumber', description: 'Protocol Number'}
]
});
}
},
'protocolnumber': {label:'Protocol Number',isDisabled:true,isHidden:true,edit:true},
'startport': { edit: true, label: 'label.start.port' },
'endport': { edit: true, label: 'label.end.port' },
'networkid': {
label: 'Select Tier',
select: function(args) {
var data = {
listAll: true,
vpcid: args.context.vpc[0].id
};
// Only show selected tier, if viewing from detail view
if (args.context.networks &&
args.context.networks[0] &&
args.context.networks[0].vpcid) {
$.extend(data, {
id: args.context.networks[0].id
});
}
// Ajax Call to display the Tiers
$.ajax({
url: createURL('listNetworks'),
data: data,
success: function(json) {
var networks = json.listnetworksresponse.network;
args.response.success({
data: $(networks).map(function(index, network) {
return {
name: network.id,
description: network.name
};
})
});
}
});
}
},
'icmptype': { edit: true, label: 'ICMP.type', isDisabled: true, desc:'Please specify -1 if you want to allow all ICMP types', defaultValue:'-1' },
'icmpcode': { edit: true, label: 'ICMP.code', isDisabled: true, desc:'Please specify -1 if you want to allow all ICMP codes', defaultValue:'-1' },
'traffictype' : {
label: 'label.traffic.type',
select: function(args) {
args.response.success({
data: [
{ name: 'Ingress', description: 'Ingress' },
{ name: 'Egress', description: 'Egress' }
]
});
}
},
'add-rule': {
label: 'label.add.rule',
addButton: true
}
},
tags: cloudStack.api.tags({ resourceType: 'NetworkACL', contextId: 'multiRule' }),
add: {
label: 'label.add',
action: function(args) {
var $multi = args.$multi;
//Support for Protocol Number between 0 to 255
if(args.data.protocol == 'protocolnumber'){
$.extend(args.data,{protocol:args.data.protocolnumber});
delete args.data.protocolnumber;
}
else
delete args.data.protocolnumber;
$.ajax({
url: createURL('createNetworkACL'),
data: $.extend(args.data, {
networkid: args.context.networks ?
args.context.networks[0].id : args.data.networkid
}),
dataType: 'json',
success: function(data) {
args.response.success({
_custom: {
jobId: data.createnetworkaclresponse.jobid,
getUpdatedItem: function(json) {
var networkName = $multi.find('select[name=networkid] option[value=' + args.data.networkid + ']').html();
var data = $.extend(json.queryasyncjobresultresponse.jobresult.networkacl, {
networkid: networkName
});
var aclRules = $multi.data('acl-rules');
aclRules.push(data);
$multi.data('acl-rules', aclRules);
$(window).trigger('cloudStack.fullRefresh');
return data;
}
},
notification: {
label: 'label.add.ACL',
poll: pollAsyncJobResult
}
});
},
error: function(data) {
args.response.error(parseXMLHttpResponse(data));
}
});
}
},
actions: {
destroy: {
label: 'label.remove.ACL',
action: function(args) {
$.ajax({
url: createURL('deleteNetworkACL'),
data: {
id: args.context.multiRule[0].id
},
dataType: 'json',
async: true,
success: function(data) {
var jobID = data.deletenetworkaclresponse.jobid;
args.response.success({
_custom: {
jobId: jobID,
getUpdatedItem: function() {
$(window).trigger('cloudStack.fullRefresh');
}
},
notification: {
label: 'label.remove.ACL',
poll: pollAsyncJobResult
}
});
},
error: function(data) {
args.response.error(parseXMLHttpResponse(data));
}
});
}
}
},
dataProvider: function(args) {
var $multi = args.$multi;
var data = {
vpcid: args.context.vpc[0].id,
listAll: true
};
if (!$multi.data('acl-rules')) {
$multi.data('acl-rules', []);
}
if (args.context.networks &&
args.context.networks[0] &&
args.context.networks[0].vpcid) {
data.networkid = args.context.networks[0].id;
$.ajax({
url: createURL('listNetworkACLs'),
data: data,
dataType: 'json',
async: true,
success: function(json) {
args.response.success({
data: $(json.listnetworkaclsresponse.networkacl).map(function(index, acl) {
return $.extend(acl, {
networkid: args.context.networks[0].name
});
})
});
},
error: function(XMLHttpResponse) {
args.response.error(parseXMLHttpResponse(XMLHttpResponse));
}
});
} else {
args.response.success({ data: $multi.data('acl-rules') });
}
}
};
cloudStack.vpc = {
routerDetailView: function() {
return {
title: 'VPC router details',
updateContext: function(args) {
var router;
$.ajax({
url: createURL("listRouters&listAll=true&vpcid=" +args.context.vpc[0].id),
dataType: "json",
async: false,
success: function(json) {
router = json.listroutersresponse.router[0];
}
});
return {
routers: [router]
};
},
actions: cloudStack.sections.system.subsections.virtualRouters
.listView.detailView.actions,
tabs: {
routerDetails: cloudStack.sections.network.sections.vpc
.listView.detailView.tabs.router
}
};
},
vmListView: {
id: 'vpcTierInstances',
listView: {
filters: {
all: { label: 'label.menu.all.instances' },
running: { label: 'label.menu.running.instances' },
stopped: { label: 'label.menu.stopped.instances' },
destroyed: { label: 'label.menu.destroyed.instances' }
},
fields: {
name: { label: 'label.name', editable: true },
account: { label: 'label.account' },
zonename: { label: 'label.zone' },
state: {
label: 'label.status',
indicator: {
'Running': 'on',
'Stopped': 'off',
'Destroyed': 'off'
}
}
},
// List view actions
actions: {
start: {
label: 'label.action.start.instance' ,
action: function(args) {
$.ajax({
url: createURL("startVirtualMachine&id=" + args.context.vpcTierInstances[0].id),
dataType: "json",
async: true,
success: function(json) {
var jid = json.startvirtualmachineresponse.jobid;
args.response.success(
{_custom:
{jobId: jid,
getUpdatedItem: function(json) {
return json.queryasyncjobresultresponse.jobresult.virtualmachine;
},
getActionFilter: function() {
return cloudStack.actionFilter.vmActionFilter;
}
}
}
);
}
});
},
messages: {
confirm: function(args) {
return 'message.action.start.instance';
},
notification: function(args) {
return 'label.action.start.instance';
},
complete: function(args) {
if(args.password != null) {
alert('Password of the VM is ' + args.password);
}
return 'label.action.start.instance';
}
},
notification: {
poll: pollAsyncJobResult
}
},
stop: {
label: 'label.action.stop.instance',
addRow: 'false',
createForm: {
title: 'label.action.stop.instance',
desc: 'message.action.stop.instance',
fields: {
forced: {
label: 'force.stop',
isBoolean: true,
isChecked: false
}
}
},
action: function(args) {
var array1 = [];
array1.push("&forced=" + (args.data.forced == "on"));
$.ajax({
url: createURL("stopVirtualMachine&id=" + args.context.vpcTierInstances[0].id + array1.join("")),
dataType: "json",
async: true,
success: function(json) {
var jid = json.stopvirtualmachineresponse.jobid;
args.response.success(
{_custom:
{jobId: jid,
getUpdatedItem: function(json) {
return json.queryasyncjobresultresponse.jobresult.virtualmachine;
},
getActionFilter: function() {
return cloudStack.actionFilter.vmActionFilter;
}
}
}
);
}
});
},
messages: {
confirm: function(args) {
return 'message.action.stop.instance';
},
notification: function(args) {
return 'label.action.stop.instance';
}
},
notification: {
poll: pollAsyncJobResult
}
},
restart: {
label: 'instances.actions.reboot.label',
action: function(args) {
$.ajax({
url: createURL("rebootVirtualMachine&id=" + args.context.vpcTierInstances[0].id),
dataType: "json",
async: true,
success: function(json) {
var jid = json.rebootvirtualmachineresponse.jobid;
args.response.success(
{_custom:
{jobId: jid,
getUpdatedItem: function(json) {
return json.queryasyncjobresultresponse.jobresult.virtualmachine;
},
getActionFilter: function() {
return cloudStack.actionFilter.vmActionFilter;
}
}
}
);
}
});
},
messages: {
confirm: function(args) {
return 'message.action.reboot.instance';
},
notification: function(args) {
return 'instances.actions.reboot.label';
}
},
notification: {
poll: pollAsyncJobResult
}
},
destroy: {
label: 'label.action.destroy.instance',
messages: {
confirm: function(args) {
return 'message.action.destroy.instance';
},
notification: function(args) {
return 'label.action.destroy.instance';
}
},
action: function(args) {
$.ajax({
url: createURL("destroyVirtualMachine&id=" + args.context.vpcTierInstances[0].id),
dataType: "json",
async: true,
success: function(json) {
var jid = json.destroyvirtualmachineresponse.jobid;
args.response.success(
{_custom:
{jobId: jid,
getUpdatedItem: function(json) {
return json.queryasyncjobresultresponse.jobresult.virtualmachine;
},
getActionFilter: function() {
return cloudStack.actionFilter.vmActionFilter;
}
}
}
);
}
});
},
notification: {
poll: pollAsyncJobResult
}
},
restore: {
label: 'label.action.restore.instance',
messages: {
confirm: function(args) {
return 'message.action.restore.instance';
},
notification: function(args) {
return 'label.action.restore.instance';
}
},
action: function(args) {
$.ajax({
url: createURL("recoverVirtualMachine&id=" + args.context.vpcTierInstances[0].id),
dataType: "json",
async: true,
success: function(json) {
var item = json.recovervirtualmachineresponse.virtualmachine;
args.response.success({data:item});
}
});
}
},
viewConsole: {
label: 'label.view.console',
action: {
externalLink: {
url: function(args) {
return clientConsoleUrl + '?cmd=access&vm=' + args.context.vpcTierInstances[0].id;
},
title: function(args) {
                  return args.context.vpcTierInstances[0].id.substr(0,8); //title in window.open() cannot contain spaces or be longer than 8 characters; otherwise IE will throw an error.
},
width: 820,
height: 640
}
}
}
},
dataProvider: function(args) {
var array1 = [];
if(args.filterBy != null) {
if(args.filterBy.kind != null) {
switch(args.filterBy.kind) {
case "all":
array1.push("&listAll=true");
break;
case "mine":
if (!args.context.projects) array1.push("&domainid=" + g_domainid + "&account=" + g_account);
break;
case "running":
array1.push("&listAll=true&state=Running");
break;
case "stopped":
array1.push("&listAll=true&state=Stopped");
break;
case "destroyed":
array1.push("&listAll=true&state=Destroyed");
break;
}
}
if(args.filterBy.search != null && args.filterBy.search.by != null && args.filterBy.search.value != null) {
switch(args.filterBy.search.by) {
case "name":
if(args.filterBy.search.value.length > 0)
array1.push("&keyword=" + args.filterBy.search.value);
break;
}
}
}
$.ajax({
url: createURL('listVirtualMachines' + array1.join("")),
data: {
networkid: args.context.networks[0].id
},
success: function(json) {
args.response.success({
data: json.listvirtualmachinesresponse.virtualmachine,
actionFilter: cloudStack.actionFilter.vmActionFilter
});
}
});
}
}
},
ipAddresses: {
listView: function() {
var listView = $.extend(true, {}, cloudStack.sections.network.sections.ipAddresses);
listView.listView.fields = {
ipaddress: listView.listView.fields.ipaddress,
zonename: listView.listView.fields.zonename,
associatednetworkname: { label: 'label.network.name' },
state: listView.listView.fields.state
};
return listView;
}
},
acl: {
multiEdit: aclMultiEdit,
listView: {
listView: {
id: 'networks',
fields: {
tierName: { label: 'label.tier' },
aclTotal: { label: 'label.network.ACL.total' }
},
dataProvider: function(args) {
$.ajax({
url: createURL('listNetworks'),
data: {
listAll: true,
vpcid: args.context.vpc[0].id
},
success: function(json) {
var networks = json.listnetworksresponse.network ?
json.listnetworksresponse.network : [];
args.response.success({
data: $.map(networks, function(tier) {
var aclTotal = 0;
// Get ACL total
$.ajax({
url: createURL('listNetworkACLs'),
async: false,
data: {
listAll: true,
networkid: tier.id
},
success: function(json) {
aclTotal = json.listnetworkaclsresponse.networkacl ?
json.listnetworkaclsresponse.networkacl.length : 0;
}
});
return $.extend(tier, {
tierName: tier.name,
aclTotal: aclTotal
});
})
});
}
});
}
}
}
},
gateways: {
add: {
preCheck: function(args) {
if(isAdmin()) { //root-admin
var items;
$.ajax({
url: createURL('listPrivateGateways'),
async: false,
data: {
vpcid: args.context.vpc[0].id,
listAll: true
},
success: function(json) {
items = json.listprivategatewaysresponse.privategateway;
}
});
if (items && items.length) {
return true; //show private gateway listView
}
else {
return false; //show create private gateway dialog
}
}
else { //regular-user, domain-admin
return true; //show private gateway listView instead of create private gateway dialog because only root-admin is allowed to create private gateway
}
},
label: 'label.add.new.gateway',
messages: {
notification: function(args) {
return 'label.add.new.gateway';
}
},
createForm: {
title: 'label.add.new.gateway',
desc: 'message.add.new.gateway.to.vpc',
fields: {
physicalnetworkid: {
docID: 'helpVPCGatewayPhysicalNetwork',
label: 'label.physical.network',
select: function(args) {
$.ajax({
url: createURL("listPhysicalNetworks"),
data: {
zoneid: args.context.vpc[0].zoneid
},
success: function(json) {
var objs = json.listphysicalnetworksresponse.physicalnetwork;
var items = [];
$(objs).each(function() {
items.push({id: this.id, description: this.name});
});
args.response.success({data: items});
}
});
}
},
vlan: {
label: 'label.vlan', validation: { required: true },
docID: 'helpVPCGatewayVLAN'
},
ipaddress: {
label: 'label.ip.address', validation: { required: true },
docID: 'helpVPCGatewayIP'
},
gateway: {
label: 'label.gateway', validation: { required: true },
docID: 'helpVPCGatewayGateway'
},
netmask: {
label: 'label.netmask', validation: { required: true },
docID: 'helpVPCGatewayNetmask'
},
sourceNat:{
label:'Source NAT',
isBoolean:true,
isChecked:false
},
aclid:{
label:'ACL',
select:function(args){
$.ajax({
url: createURL('listNetworkACLLists'),
dataType: 'json',
async: true,
success: function(json) {
var objs = json.listnetworkacllistsresponse.networkacllist;
var items = [];
$(objs).each(function() {
if(this.name == "default_deny")
items.unshift({id:this.id,description:this.name});
else
items.push({id: this.id, description: this.name});
});
args.response.success({data: items});
}
});
}
}
}
},
action: function(args) {
var array1=[];
if(args.$form.find('.form-item[rel=sourceNat]').find('input[type=checkbox]').is(':Checked')== true) {
array1.push("&sourcenatsupported=true");
}
else
array1.push("&sourcenatsupported=false");
$.ajax({
url: createURL('createPrivateGateway'+ array1.join("")),
data: {
physicalnetworkid: args.data.physicalnetworkid,
vpcid: args.context.vpc[0].id,
ipaddress: args.data.ipaddress,
gateway: args.data.gateway,
netmask: args.data.netmask,
vlan: args.data.vlan,
aclid:args.data.aclid
},
success: function(json) {
var jid = json.createprivategatewayresponse.jobid;
args.response.success(
{_custom:
{jobId: jid,
getUpdatedItem: function(json) {
return json.queryasyncjobresultresponse.jobresult.privategateway;
}
}
}
);
},
error: function(json) {
args.response.error(parseXMLHttpResponse(json));
}
});
},
notification: {
poll: pollAsyncJobResult
}
},
listView: function() {
return {
listView: {
id: 'vpcGateways',
fields: {
ipaddress: { label: 'label.ip.address', validation: { required: true }},
gateway: { label: 'label.gateway', validation: { required: true }},
netmask: { label: 'label.netmask', validation: { required: true }},
vlan: { label: 'label.vlan', validation: { required: true }}
},
actions:{
add:{
label:'Add Private Gateway',
preFilter: function(args) {
if(isAdmin() || isDomainAdmin() )
return true;
else
return false;
},
createForm:{
title: 'label.add.new.gateway',
desc: 'message.add.new.gateway.to.vpc',
fields: {
physicalnetworkid: {
docID: 'helpVPCGatewayPhysicalNetwork',
label: 'label.physical.network',
select: function(args) {
$.ajax({
url: createURL("listPhysicalNetworks"),
data: {
zoneid: args.context.vpc[0].zoneid
},
success: function(json) {
var objs = json.listphysicalnetworksresponse.physicalnetwork;
var items = [];
$(objs).each(function() {
items.push({id: this.id, description: this.name});
});
args.response.success({data: items});
}
});
}
},
vlan: {
label: 'label.vlan', validation: { required: true },
docID: 'helpVPCGatewayVLAN'
},
ipaddress: {
label: 'label.ip.address', validation: { required: true },
docID: 'helpVPCGatewayIP'
},
gateway: {
label: 'label.gateway', validation: { required: true },
docID: 'helpVPCGatewayGateway'
},
netmask: {
label: 'label.netmask', validation: { required: true },
docID: 'helpVPCGatewayNetmask'
},
sourceNat:{
label:'Source NAT',
isBoolean:true,
isChecked:false
},
aclid:{
label:'ACL',
select:function(args){
$.ajax({
url: createURL('listNetworkACLLists'),
dataType: 'json',
async: true,
success: function(json) {
var objs = json.listnetworkacllistsresponse.networkacllist;
var items = [];
$(objs).each(function() {
if(this.name == "default_deny")
items.unshift({id:this.id,description:this.name});
else
items.push({id: this.id, description: this.name});
});
args.response.success({data: items});
}
});
}
}
}
},
action:function(args){
var array1=[];
if(args.$form.find('.form-item[rel=sourceNat]').find('input[type=checkbox]').is(':Checked')== true) {
array1.push("&sourcenatsupported=true");
}
else
array1.push("&sourcenatsupported=false");
$.ajax({
url: createURL('createPrivateGateway'+ array1.join("")),
data: {
physicalnetworkid: args.data.physicalnetworkid,
vpcid: args.context.vpc[0].id,
ipaddress: args.data.ipaddress,
gateway: args.data.gateway,
netmask: args.data.netmask,
vlan: args.data.vlan,
aclid:args.data.aclid
},
success: function(json) {
var jid = json.createprivategatewayresponse.jobid;
args.response.success(
{_custom:
{jobId: jid,
getUpdatedItem: function(json) {
return json.queryasyncjobresultresponse.jobresult.privategateway;
}
}
}
);
},
error: function(json) {
args.response.error(parseXMLHttpResponse(json));
}
});
},
notification: {
poll: pollAsyncJobResult
}
}
},
dataProvider: function(args) {
$.ajax({
url: createURL('listPrivateGateways'),
data: {
vpcid: args.context.vpc[0].id,
listAll: true
},
success: function(json) {
var items = json.listprivategatewaysresponse.privategateway;
args.response.success({ data: items });
}
});
},
detailView: {
name: 'label.details',
actions: {
remove: {
label: 'label.delete.gateway',
messages: {
confirm: function(args) {
return 'message.delete.gateway';
},
notification: function(args) {
return 'label.delete.gateway';
}
},
action: function(args) {
$.ajax({
url: createURL("deletePrivateGateway&id=" + args.context.vpcGateways[0].id),
dataType: "json",
async: true,
success: function(json) {
var jid = json.deleteprivategatewayresponse.jobid;
args.response.success(
{_custom:
{
jobId: jid
}
}
);
}
});
},
notification: {
poll: pollAsyncJobResult
}
},
replaceACL:{
label:'Replace ACL',
createForm:{
title:'Replace ACL',
label:'Replace ACL',
fields:{
aclid:{
label:'ACL',
select:function(args){
$.ajax({
url: createURL('listNetworkACLLists'),
dataType: 'json',
async: true,
success: function(json) {
var objs = json.listnetworkacllistsresponse.networkacllist;
var items = [];
$(objs).each(function() {
items.push({id: this.id, description: this.name});
});
args.response.success({data: items});
}
});
}
}
}
},
action: function(args) {
$.ajax({
url: createURL("replaceNetworkACLList&gatewayid=" + args.context.vpcGateways[0].id + "&aclid=" + args.data.aclid ),
dataType: "json",
success: function(json) {
var jid = json.replacenetworkacllistresponse.jobid;
args.response.success(
{_custom:
{
jobId: jid,
getUpdatedItem: function(json) {
var item = json.queryasyncjobresultresponse.jobresult.aclid;
return {data:item};
}
}
}
                      );
},
error:function(json){
args.response.error(parseXMLHttpResponse(json));
}
});
},
notification: {
poll: pollAsyncJobResult
},
messages: {
confirm: function(args) {
                      return 'Do you want to replace the ACL with a new one?';
},
notification: function(args) {
return 'ACL replaced';
}
}
}
},
tabs: {
details: {
title: 'label.details',
fields: [
{
ipaddress: { label: 'label.ip.address' }
},
{
gateway: { label: 'label.gateway' },
netmask: { label: 'label.netmask'},
vlan: { label: 'label.vlan' },
state: { label: 'label.state' },
id: { label: 'label.id' },
zonename: { label: 'label.zone' },
domain: { label: 'label.domain' },
account: { label: 'label.account' },
sourcenatsupported:{
                    label: 'SourceNAT Supported',
converter: function(str) {
return str ? 'Yes' : 'No';
}
},
aclid:{label:'ACL id'}
}
],
dataProvider: function(args) {
$.ajax({
url: createURL('listPrivateGateways'),
data: {
id: args.context.vpcGateways[0].id,
listAll: true
},
success: function(json) {
var item = json.listprivategatewaysresponse.privategateway[0];
args.response.success({
data: item,
actionFilter: function(args) {
var allowedActions = [];
if(isAdmin()) {
allowedActions.push("remove");
allowedActions.push("replaceACL");
}
return allowedActions;
}
});
}
});
}
},
staticRoutes: {
title: 'Static Routes',
custom: function(args) {
return $('<div>').multiEdit({
noSelect: true,
context: args.context,
fields: {
cidr: { edit: true, label: 'label.CIDR.of.destination.network' },
'add-rule': {
label: 'label.add.route',
addButton: true
}
},
tags: cloudStack.api.tags({ resourceType: 'StaticRoute', contextId: 'multiRule' }),
add: {
label: 'label.add',
action: function(args) {
$.ajax({
url: createURL('createStaticRoute'),
data: {
gatewayid: args.context.vpcGateways[0].id,
cidr: args.data.cidr
},
success: function(data) {
args.response.success({
_custom: {
jobId: data.createstaticrouteresponse.jobid
},
notification: {
label: 'label.add.static.route',
poll: pollAsyncJobResult
}
});
},
error: function(data) {
args.response.error(parseXMLHttpResponse(data));
}
});
}
},
actions: {
destroy: {
label: 'label.remove.static.route',
action: function(args) {
$.ajax({
url: createURL('deleteStaticRoute'),
data: {
id: args.context.multiRule[0].id
},
dataType: 'json',
async: true,
success: function(data) {
var jobID = data.deletestaticrouteresponse.jobid;
args.response.success({
_custom: {
jobId: jobID
},
notification: {
label: 'label.remove.static.route',
poll: pollAsyncJobResult
}
});
}
});
}
}
},
dataProvider: function(args) {
$.ajax({
url: createURL('listStaticRoutes'),
data: {
gatewayid: args.context.vpcGateways[0].id,
listAll: true
},
success: function(json) {
var items = json.liststaticroutesresponse.staticroute;
args.response.success({ data: items });
}
});
}
});
}
}
}
}
}
};
}
},
siteToSiteVPN: {
title: 'label.site.to.site.VPN',
id: 'siteToSiteVpn',
sectionSelect: {
preFilter: function(args) {
return ["vpnGateway", "vpnConnection"];
},
label: 'label.select-view'
},
// This is a custom add function -- does not show in list view
add: {
// Check if VPN gateways exist
// -- if false, don't show list view
preCheck: function(args) {
var items;
$.ajax({
url: createURL('listVpnGateways&listAll=true'),
data: {
vpcid: args.context.vpc[0].id
},
async: false,
success: function(json) {
items = json.listvpngatewaysresponse.vpngateway;
}
});
if (items && items.length) {
return true;
}
return false;
},
label: 'label.add.VPN.gateway',
messages: {
notification: function(args) {
return 'label.add.VPN.gateway';
}
},
createForm: {
title: 'label.add.VPN.gateway',
desc: 'message.add.VPN.gateway',
fields: {}
},
action: function(args) {
$.ajax({
url: createURL("createVpnGateway"),
data: {
vpcid: args.context.vpc[0].id
},
success: function(json) {
var jid = json.createvpngatewayresponse.jobid;
args.response.success(
{_custom:
{
jobId: jid,
getUpdatedItem: function(json) {
return json.queryasyncjobresultresponse.jobresult.vpngateway;
}
}
}
);
}
});
},
notification: {
poll: pollAsyncJobResult
}
},
sections: {
vpnGateway: {
type: 'select',
title: 'label.VPN.gateway',
listView: {
id: 'vpnGateway',
label: 'label.VPN.gateway',
fields: {
publicip: { label: 'label.ip.address' },
account: { label: 'label.account' },
domain: { label: 'label.domain' }
},
dataProvider: function(args) {
var array1 = [];
if(args.filterBy != null) {
if(args.filterBy.search != null && args.filterBy.search.by != null && args.filterBy.search.value != null) {
switch(args.filterBy.search.by) {
case "name":
if(args.filterBy.search.value.length > 0)
array1.push("&keyword=" + args.filterBy.search.value);
break;
}
}
}
$.ajax({
url: createURL("listVpnGateways&listAll=true&page=" + args.page + "&pagesize=" + pageSize + array1.join("")),
data: {
vpcid: args.context.vpc[0].id
},
async: false,
success: function(json) {
var items = json.listvpngatewaysresponse.vpngateway;
args.response.success({data: items});
}
});
},
detailView: {
name: 'label.details',
actions: {
remove: {
label: 'label.delete.VPN.gateway',
messages: {
confirm: function(args) {
return 'message.delete.VPN.gateway';
},
notification: function(args) {
return 'label.delete.VPN.gateway';
}
},
action: function(args) {
$.ajax({
url: createURL("deleteVpnGateway"),
data: {
id: args.context.vpnGateway[0].id
},
success: function(json) {
var jid = json.deletevpngatewayresponse.jobid;
args.response.success(
{_custom:
{
jobId: jid
}
}
);
}
});
},
notification: {
poll: pollAsyncJobResult
}
}
},
tabs: {
details: {
title: 'label.details',
fields: [
{
publicip: { label: 'label.ip.address' }
},
{
id: { label: 'label.id' },
domain: { label: 'label.domain' },
account: { label: 'label.account' }
}
],
dataProvider: function(args) {
$.ajax({
url: createURL("listVpnGateways"),
data: {
id: args.context.vpnGateway[0].id
},
async: true,
success: function(json) {
var item = json.listvpngatewaysresponse.vpngateway[0];
args.response.success({data: item});
}
});
}
}
}
}
}
},
vpnConnection: {
type: 'select',
title: 'label.VPN.connection',
listView: {
id: 'vpnConnection',
label: 'label.VPN.connection',
fields: {
publicip: { label: 'label.ip.address' },
gateway: { label: 'label.gateway' },
state: {
label: 'label.state',
indicator: {
'Connected': 'on',
'Disconnected': 'off',
'Error': 'off'
}
},
ipsecpsk: { label: 'label.IPsec.preshared.key' },
ikepolicy: { label: 'label.IKE.policy' },
esppolicy: { label: 'label.ESP.policy' }
},
dataProvider: function(args) {
var array1 = [];
if(args.filterBy != null) {
if(args.filterBy.search != null && args.filterBy.search.by != null && args.filterBy.search.value != null) {
switch(args.filterBy.search.by) {
case "name":
if(args.filterBy.search.value.length > 0)
array1.push("&keyword=" + args.filterBy.search.value);
break;
}
}
}
$.ajax({
url: createURL("listVpnConnections&listAll=true&page=" + args.page + "&pagesize=" + pageSize + array1.join("")),
data: {
vpcid: args.context.vpc[0].id
},
success: function(json) {
var items = json.listvpnconnectionsresponse.vpnconnection;
args.response.success({data:items});
}
});
},
actions:{
add: {
label: 'label.create.VPN.connection',
messages: {
notification: function(args) {
return 'label.create.VPN.connection';
}
},
createForm: {
title: 'label.create.VPN.connection',
fields: {
vpncustomergatewayid: {
label: 'label.VPN.customer.gateway',
validation: { required: true },
select: function(args) {
$.ajax({
url: createURL("listVpnCustomerGateways"),
data: {
listAll: true
},
success: function(json) {
var items = json.listvpncustomergatewaysresponse.vpncustomergateway;
args.response.success({
data: $.map(items, function(item) {
return {
id: item.id,
description: item.name
};
})
});
}
});
}
}
}
},
action: function(args) {
var vpngatewayid = null;
$.ajax({
url: createURL('listVpnGateways'),
data: {
vpcid: args.context.vpc[0].id
},
async: false,
success: function(json) {
var items = json.listvpngatewaysresponse.vpngateway;
if(items != null && items.length > 0) {
vpngatewayid = items[0].id;
}
}
});
if(vpngatewayid == null) {
args.response.error('The selected VPC does not have a VPN gateway. Please create a VPN gateway for the VPC first.');
return;
}
$.ajax({
url: createURL('createVpnConnection'),
data: {
s2svpngatewayid: vpngatewayid,
s2scustomergatewayid: args.data.vpncustomergatewayid
},
success: function(json) {
var jid = json.createvpnconnectionresponse.jobid;
args.response.success(
{_custom:
{
jobId: jid,
getUpdatedItem: function(json) {
return json.queryasyncjobresultresponse.jobresult.vpnconnection;
}
}
}
);
},
error: function(xmlHttpResponse) {
args.response.error(parseXMLHttpResponse(xmlHttpResponse));
}
});
},
notification: {
poll: pollAsyncJobResult
}
}
},
detailView: {
name: 'label.details',
tabs: {
details: {
title: 'label.details',
fields: [
{
id: { label: 'label.id' },
//s2svpngatewayid: { label: 'VPN gateway ID' },
publicip: { label: 'label.ip.address' },
//s2scustomergatewayid: { label: 'Customer gateway ID' },
gateway: { label: 'label.gateway' },
cidrlist: { label: 'label.CIDR.list' },
ipsecpsk: { label: 'label.IPsec.preshared.key' },
ikepolicy: { label: 'label.IKE.policy' },
esppolicy: { label: 'label.ESP.policy' },
ikelifetime: { label: 'label.IKE.lifetime' },
esplifetime: {label: 'label.ESP.lifetime' },
dpd: {
label: 'label.dead.peer.detection',
converter: function(str) {
return str ? 'Yes' : 'No';
}
},
state: {label: 'label.state' },
created: { label: 'label.date', converter: cloudStack.converters.toLocalDate }
}
],
dataProvider: function(args) {
$.ajax({
url: createURL("listVpnConnections&id=" + args.context.vpnConnection[0].id),
dataType: "json",
async: true,
success: function(json) {
var item = json.listvpnconnectionsresponse.vpnconnection[0];
args.response.success({data: item});
}
});
}
}
},
actions: {
restart: {
label: 'label.reset.VPN.connection',
messages: {
confirm: function(args) {
                    return 'message.reset.VPN.connection';
},
notification: function(args) {
return 'label.reset.VPN.connection';
}
},
action: function(args) {
$.ajax({
url: createURL("resetVpnConnection"),
data: {
id: args.context.vpnConnection[0].id
},
dataType: "json",
async: true,
success: function(json) {
var jid = json.resetvpnconnectionresponse.jobid;
args.response.success(
{_custom:
{
jobId: jid,
getUpdatedItem: function(json) {
return json.queryasyncjobresultresponse.jobresult.vpnconnection;
}
}
}
);
}
});
},
notification: {
poll: pollAsyncJobResult
}
},
remove: {
label: 'label.delete.VPN.connection',
messages: {
confirm: function(args) {
return 'message.delete.VPN.connection';
},
notification: function(args) {
return 'label.delete.VPN.connection';
}
},
action: function(args) {
$.ajax({
url: createURL("deleteVpnConnection"),
dataType: "json",
data: {
id: args.context.vpnConnection[0].id
},
async: true,
success: function(json) {
var jid = json.deletevpnconnectionresponse.jobid;
args.response.success(
{_custom:
{
jobId: jid,
getUpdatedItem: function(json) {
return json.queryasyncjobresultresponse.jobresult.vpnconnection;
}
}
}
);
}
});
},
notification: {
poll: pollAsyncJobResult
}
}
}
}
}
}
}
},
tiers: {
detailView: { //duplicate from cloudStack.sections.network.sections.networks.listView.detailView (begin)
name: 'Guest network details',
viewAll: {
path: 'network.ipAddresses',
label: 'label.menu.ipaddresses',
preFilter: function(args) {
if (args.context.networks[0].state == 'Destroyed')
return false;
var services = args.context.networks[0].service;
if(services == null)
return false;
if(args.context.networks[0].type == "Isolated") {
for(var i=0; i < services.length; i++) {
var service = services[i];
if(service.name == "SourceNat") {
return true;
}
}
}
else if(args.context.networks[0].type == "Shared") {
var havingSecurityGroupService = false;
var havingElasticIpCapability = false;
var havingElasticLbCapability = false;
for(var i=0; i < services.length; i++) {
var service = services[i];
if(service.name == "SecurityGroup") {
havingSecurityGroupService = true;
}
else if(service.name == "StaticNat") {
$(service.capability).each(function(){
if(this.name == "ElasticIp" && this.value == "true") {
havingElasticIpCapability = true;
return false; //break $.each() loop
}
});
}
else if(service.name == "Lb") {
$(service.capability).each(function(){
if(this.name == "ElasticLb" && this.value == "true") {
havingElasticLbCapability = true;
return false; //break $.each() loop
}
});
}
}
if(havingSecurityGroupService == true && havingElasticIpCapability == true && havingElasticLbCapability == true)
return true;
else
return false;
}
return false;
}
},
actions: {
edit: {
label: 'label.edit',
messages: {
notification: function(args) {
return 'label.edit.network.details';
}
},
action: function(args) {
var array1 = [];
array1.push("&name=" + todb(args.data.name));
array1.push("&displaytext=" + todb(args.data.displaytext));
//args.data.networkdomain is null when networkdomain field is hidden
if(args.data.networkdomain != null && args.data.networkdomain != args.context.networks[0].networkdomain)
array1.push("&networkdomain=" + todb(args.data.networkdomain));
//args.data.networkofferingid is null when networkofferingid field is hidden
if(args.data.networkofferingid != null && args.data.networkofferingid != args.context.networks[0].networkofferingid) {
array1.push("&networkofferingid=" + todb(args.data.networkofferingid));
if(args.context.networks[0].type == "Isolated") { //Isolated network
cloudStack.dialog.confirm({
message: 'Do you want to keep the current guest network CIDR unchanged?',
action: function() { //"Yes" button is clicked
array1.push("&changecidr=false");
$.ajax({
url: createURL("updateNetwork&id=" + args.context.networks[0].id + array1.join("")),
dataType: "json",
success: function(json) {
var jid = json.updatenetworkresponse.jobid;
args.response.success(
{_custom:
{jobId: jid,
getUpdatedItem: function(json) {
var item = json.queryasyncjobresultresponse.jobresult.network;
return {data: item};
}
}
}
);
},
error:function(json) {
args.response.error(parseXMLHttpResponse(json));
}
});
},
cancelAction: function() { //"Cancel" button is clicked
array1.push("&changecidr=true");
$.ajax({
url: createURL("updateNetwork&id=" + args.context.networks[0].id + array1.join("")),
dataType: "json",
success: function(json) {
var jid = json.updatenetworkresponse.jobid;
args.response.success(
{_custom:
{jobId: jid,
getUpdatedItem: function(json) {
var item = json.queryasyncjobresultresponse.jobresult.network;
return {data: item};
}
}
}
);
},
error:function(json) {
args.response.error(parseXMLHttpResponse(json));
}
});
}
});
return;
}
}
$.ajax({
url: createURL("updateNetwork&id=" + args.context.networks[0].id + array1.join("")),
dataType: "json",
success: function(json) {
var jid = json.updatenetworkresponse.jobid;
args.response.success(
{_custom:
{jobId: jid,
getUpdatedItem: function(json) {
var item = json.queryasyncjobresultresponse.jobresult.network;
return {data: item};
}
}
}
);
},
error:function(json) {
args.response.error(parseXMLHttpResponse(json));
}
});
},
notification: {
poll: pollAsyncJobResult
}
},
'restart': {
label: 'label.restart.network',
createForm: {
title: 'label.restart.network',
desc: 'message.restart.network',
preFilter: function(args) {
var zoneObj;
$.ajax({
url: createURL("listZones&id=" + args.context.networks[0].zoneid),
dataType: "json",
async: false,
success: function(json){
zoneObj = json.listzonesresponse.zone[0];
}
});
if(zoneObj.networktype == "Basic") {
args.$form.find('.form-item[rel=cleanup]').find('input').removeAttr('checked'); //unchecked
args.$form.find('.form-item[rel=cleanup]').hide(); //hidden
}
else {
args.$form.find('.form-item[rel=cleanup]').find('input').attr('checked', 'checked'); //checked
args.$form.find('.form-item[rel=cleanup]').css('display', 'inline-block'); //shown
}
},
fields: {
cleanup: {
label: 'label.clean.up',
isBoolean: true
}
}
},
messages: {
notification: function(args) {
return 'label.restart.network';
}
},
action: function(args) {
var array1 = [];
array1.push("&cleanup=" + (args.data.cleanup == "on"));
$.ajax({
url: createURL("restartNetwork&id=" + args.context.networks[0].id + array1.join("")),
dataType: "json",
async: true,
success: function(json) {
var jid = json.restartnetworkresponse.jobid;
args.response.success(
{_custom:
{jobId: jid,
getUpdatedItem: function(json) {
return json.queryasyncjobresultresponse.jobresult.network;
}
}
}
);
}
});
},
notification: {
poll: pollAsyncJobResult
}
},
remove: {
label: 'label.action.delete.network',
messages: {
confirm: function(args) {
return 'message.action.delete.network';
},
notification: function(args) {
return 'label.action.delete.network';
}
},
action: function(args) {
$.ajax({
url: createURL("deleteNetwork&id=" + args.context.networks[0].id),
dataType: "json",
async: true,
success: function(json) {
var jid = json.deletenetworkresponse.jobid;
args.response.success(
{_custom:
{jobId: jid
}
}
);
}
});
},
notification: {
poll: pollAsyncJobResult
}
}
},
tabFilter: function(args) {
var networkOfferingHavingELB = false;
$.ajax({
url: createURL("listNetworkOfferings&id=" + args.context.networks[0].networkofferingid),
dataType: "json",
async: false,
success: function(json) {
var networkoffering = json.listnetworkofferingsresponse.networkoffering[0];
$(networkoffering.service).each(function(){
var thisService = this;
if(thisService.name == "Lb") {
$(thisService.capability).each(function(){
if(this.name == "ElasticLb" && this.value == "true") {
networkOfferingHavingELB = true;
return false; //break $.each() loop
}
});
return false; //break $.each() loop
}
});
}
});
var hiddenTabs = ['ipAddresses']; // Disable IP address tab; it is redundant with 'view all' button
if(networkOfferingHavingELB == false)
hiddenTabs.push("addloadBalancer");
return hiddenTabs;
},
isMaximized: true,
tabs: {
details: {
title: 'Network Details',
preFilter: function(args) {
var hiddenFields = [];
var zone;
$.ajax({
url: createURL('listZones'),
data: {
id: args.context.networks[0].zoneid
},
async: false,
success: function(json) {
zone = json.listzonesresponse.zone[0];
}
});
if(zone.networktype == "Basic") {
hiddenFields.push("account");
hiddenFields.push("gateway");
hiddenFields.push("vlan");
hiddenFields.push("cidr");
//hiddenFields.push("netmask");
}
if(args.context.networks[0].type == "Isolated") {
hiddenFields.push("networkofferingdisplaytext");
hiddenFields.push("networkdomaintext");
hiddenFields.push("gateway");
hiddenFields.push("networkofferingname");
//hiddenFields.push("netmask");
}
else { //selectedGuestNetworkObj.type == "Shared"
hiddenFields.push("networkofferingid");
hiddenFields.push("networkdomain");
}
return hiddenFields;
},
fields: [
{
name: {
label: 'label.name',
isEditable: true
}
},
{
id: { label: 'label.id' },
zonename: { label: 'label.zone' },
displaytext: {
label: 'label.description',
isEditable: true
},
type: {
label: 'label.type'
},
state: {
label: 'label.state'
},
ispersistent:{
                    label: 'Persistent',
converter:cloudStack.converters.toBooleanText
},
restartrequired: {
label: 'label.restart.required',
converter: function(booleanValue) {
if(booleanValue == true)
return "<font color='red'>Yes</font>";
else if(booleanValue == false)
return "No";
}
},
vlan: { label: 'label.vlan.id' },
networkofferingname: { label: 'label.network.offering' },
networkofferingid: {
label: 'label.network.offering',
isEditable: true,
select: function(args){
if (args.context.networks[0].state == 'Destroyed') {
args.response.success({ data: [] });
return;
}
var items = [];
$.ajax({
url: createURL("listNetworkOfferings&networkid=" + args.context.networks[0].id),
dataType: "json",
async: false,
success: function(json) {
var networkOfferingObjs = json.listnetworkofferingsresponse.networkoffering;
$(networkOfferingObjs).each(function() {
items.push({id: this.id, description: this.displaytext});
});
}
});
$.ajax({
url: createURL("listNetworkOfferings&id=" + args.context.networks[0].networkofferingid), //include currently selected network offeirng to dropdown
dataType: "json",
async: false,
success: function(json) {
var networkOfferingObjs = json.listnetworkofferingsresponse.networkoffering;
$(networkOfferingObjs).each(function() {
items.push({id: this.id, description: this.displaytext});
});
}
});
args.response.success({data: items});
}
},
gateway: { label: 'label.gateway' },
//netmask: { label: 'Netmask' },
cidr: { label: 'label.cidr' },
networkdomaintext: {
label: 'label.network.domain.text'
},
networkdomain: {
label: 'label.network.domain',
isEditable: true
},
domain: { label: 'label.domain' },
account: { label: 'label.account' }
}
],
dataProvider: function(args) {
$.ajax({
url: createURL("listNetworks&id=" + args.context.networks[0].id + "&listAll=true"), //pass "&listAll=true" to "listNetworks&id=xxxxxxxx" for now before API gets fixed.
data: { listAll: true },
dataType: "json",
async: true,
success: function(json) {
var jsonObj = json.listnetworksresponse.network[0];
args.response.success(
{
actionFilter: cloudStack.actionFilter.guestNetwork,
data: jsonObj
}
);
}
});
}
},
acl: {
title: 'label.network.ACL',
custom: function(args) {
// Widget renders ACL multi-edit, overriding this fn
return $('<div>');
}
},
ipAddresses: {
title: 'label.menu.ipaddresses',
custom: function(args) {
// Widget renders IP addresses, overriding this fn
return $('<div>');
}
},
addloadBalancer: {
title: 'label.add.load.balancer',
custom: function(args) {
var context = args.context;
return $('<div>').multiEdit(
{
context: context,
listView: $.extend(true, {}, cloudStack.sections.instances, {
listView: {
dataProvider: function(args) {
var networkid;
if('vpc' in args.context)
networkid = args.context.multiData.tier;
else
networkid = args.context.ipAddresses[0].associatednetworkid;
var data = {
page: args.page,
pageSize: pageSize,
networkid: networkid,
listAll: true
};
$.ajax({
url: createURL('listVirtualMachines'),
data: data,
dataType: 'json',
async: true,
success: function(data) {
args.response.success({
data: $.grep(
data.listvirtualmachinesresponse.virtualmachine ?
data.listvirtualmachinesresponse.virtualmachine : [],
function(instance) {
return $.inArray(instance.state, [
'Destroyed'
]) == -1;
}
)
});
},
error: function(data) {
args.response.error(parseXMLHttpResponse(data));
}
});
}
}
}),
multipleAdd: true,
fields: {
'name': { edit: true, label: 'label.name' },
'publicport': { edit: true, label: 'label.public.port' },
'privateport': { edit: true, label: 'label.private.port' },
'algorithm': {
label: 'label.algorithm',
select: function(args) {
args.response.success({
data: [
{ name: 'roundrobin', description: _l('label.round.robin') },
{ name: 'leastconn', description: _l('label.least.connections') },
{ name: 'source', description: _l('label.source') }
]
});
}
},
'sticky': {
label: 'label.stickiness',
custom: {
buttonLabel: 'label.configure',
action: cloudStack.lbStickyPolicy.dialog()
}
},
'add-vm': {
label: 'label.add.vm',
addButton: true
}
},
add: {
label: 'label.add.vm',
action: function(args) {
var data = {
algorithm: args.data.algorithm,
name: args.data.name,
privateport: args.data.privateport,
publicport: args.data.publicport,
openfirewall: false,
domainid: g_domainid,
account: g_account
};
if('vpc' in args.context) { //from VPC section
if(args.data.tier == null) {
args.response.error('Tier is required');
return;
}
$.extend(data, {
networkid: args.data.tier
});
}
else { //from Guest Network section
$.extend(data, {
networkid: args.context.networks[0].id
});
}
var stickyData = $.extend(true, {}, args.data.sticky);
$.ajax({
url: createURL('createLoadBalancerRule'),
data: data,
dataType: 'json',
async: true,
success: function(data) {
var itemData = args.itemData;
var jobID = data.createloadbalancerruleresponse.jobid;
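                          // The LB rule has been created; now assign the selected VMs to it in a second call.
                          // The job ID from rule creation is used below to poll the overall operation.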
$.ajax({
url: createURL('assignToLoadBalancerRule'),
data: {
id: data.createloadbalancerruleresponse.id,
virtualmachineids: $.map(itemData, function(elem) {
return elem.id;
}).join(',')
},
dataType: 'json',
async: true,
success: function(data) {
var lbCreationComplete = false;
args.response.success({
_custom: {
jobId: jobID
},
notification: {
label: 'label.add.load.balancer',
poll: function(args) {
var complete = args.complete;
var error = args.error;
pollAsyncJobResult({
_custom: args._custom,
complete: function(args) {
if (lbCreationComplete) {
return;
}
lbCreationComplete = true;
cloudStack.dialog.notice({
message: _l('message.add.load.balancer.under.ip') +
args.data.loadbalancer.publicip
});
if (stickyData &&
stickyData.methodname &&
stickyData.methodname != 'None') {
cloudStack.lbStickyPolicy.actions.add(
args.data.loadbalancer.id,
stickyData,
complete, // Complete
complete // Error
);
} else {
complete();
}
},
error: error
});
}
}
});
},
error: function(data) {
args.response.error(parseXMLHttpResponse(data));
}
});
},
error: function(data) {
args.response.error(parseXMLHttpResponse(data));
}
});
}
},
dataProvider: function(args) {
args.response.success({ //no LB listing in AddLoadBalancer tab
data: []
});
}
}
);
}
}
}
      }, //duplicate from cloudStack.sections.network.sections.networks.listView.detailView (end)
actionPreFilter: function(args) {
var tier = args.context.networks[0];
var state = tier.state;
return state == 'Running' ? ['start'] : ['stop'];
},
actions: {
add: {
label: 'label.add.new.tier',
createForm: {
title: 'label.add.new.tier',
fields: {
name: {
label: 'label.name',
validation: { required: true },
docID: 'helpTierName'
},
networkOfferingId: {
label: 'label.network.offering',
docID: 'helpTierNetworkOffering',
validation: { required: true },
dependsOn: 'zoneId',
select: function(args) {
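                  // First check whether this VPC already has a tier that supports LB; if one exists,
                  // network offerings that include the Lb service are filtered out of the dropdown below.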
var networkSupportingLbExists = false;
$.ajax({
url: createURL('listNetworks'),
data: {
vpcid: args.context.vpc[0].id,
supportedservices: 'LB'
},
success: function(json) {
                      networkSupportingLbExists = (json.listnetworksresponse.network != null && json.listnetworksresponse.network.length > 0);
$.ajax({
url: createURL('listNetworkOfferings'),
data: {
forvpc: true,
zoneid: args.zoneId,
guestiptype: 'Isolated',
supportedServices: 'SourceNat',
specifyvlan: false,
state: 'Enabled'
},
success: function(json) {
var networkOfferings = json.listnetworkofferingsresponse.networkoffering;
var items;
if(networkSupportingLbExists == true) {
items = $.grep(networkOfferings, function(networkOffering) {
var includingLbService = false;
$(networkOffering.service).each(function(){
var thisService = this;
if(thisService.name == "Lb") {
includingLbService = true;
return false; //break $.each() loop
}
});
return !includingLbService;
});
}
else {
items = networkOfferings;
}
args.response.success({
data: $.map(items, function(item) {
return {
id: item.id,
description: item.name
};
})
});
}
});
}
});
}
},
gateway: {
label: 'label.gateway',
docID: 'helpTierGateway',
validation: { required: true }
},
netmask: {
label: 'label.netmask',
docID: 'helpTierNetmask',
validation: { required: true }
}
}
},
action: function(args) {
var dataObj = {
zoneId: args.context.vpc[0].zoneid,
vpcid: args.context.vpc[0].id,
domainid: args.context.vpc[0].domainid,
account: args.context.vpc[0].account,
networkOfferingId: args.data.networkOfferingId,
name: args.data.name,
displayText: args.data.name,
gateway: args.data.gateway,
netmask: args.data.netmask
};
$.ajax({
url: createURL('createNetwork'),
dataType: 'json',
data: dataObj,
success: function(json) {
args.response.success({
data: json.createnetworkresponse.network
});
},
error: function(XMLHttpResponse) {
args.response.error(parseXMLHttpResponse(XMLHttpResponse));
}
});
},
messages: {
notification: function() { return 'Add new tier'; }
}
},
/*
start: {
label: 'Start tier',
shortLabel: 'Start',
action: function(args) {
args.response.success();
},
notification: {
poll: function(args) { args.complete({ data: { state: 'Running' } }); }
}
},
*/
/*
stop: {
label: 'Stop tier',
shortLabel: 'Stop',
action: function(args) {
args.response.success();
},
notification: {
poll: function(args) { args.complete({ data: { state: 'Stopped' } }); }
}
},
*/
addVM: {
label: 'label.add.VM.to.tier',
shortLabel: 'label.add.vm',
action: cloudStack.uiCustom.instanceWizard(
$.extend(true, {}, cloudStack.instanceWizard, {
pluginForm: {
name: 'vpcTierInstanceWizard'
}
})
),
notification: {
poll: pollAsyncJobResult
}
},
// Removing ACL buttons from the tier chart
/* acl: {
label: 'Configure ACL for tier',
shortLabel: 'ACL',
multiEdit: aclMultiEdit
}, */
remove: {
label: 'label.remove.tier',
action: function(args) {
$.ajax({
url: createURL('deleteNetwork'),
dataType: "json",
data: {
id: args.context.networks[0].id
},
success: function(json) {
var jid = json.deletenetworkresponse.jobid;
args.response.success(
{_custom:
{jobId: jid
}
}
);
}
});
},
notification: {
poll: pollAsyncJobResult
}
}
},
// Get tiers
dataProvider: function(args) {
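          // Fetch the VPC's tiers, then load each tier's VMs with a synchronous call so the
          // VM lists are attached to the tiers before the response is returned.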
$.ajax({
url: createURL("listNetworks"),
dataType: "json",
data: {
vpcid: args.context.vpc[0].id,
//listAll: true, //do not pass listAll to listNetworks under VPC
domainid: args.context.vpc[0].domainid,
account: args.context.vpc[0].account
},
async: true,
success: function(json) {
var networks = json.listnetworksresponse.network;
if(networks != null && networks.length > 0) {
for(var i = 0; i < networks.length; i++) {
$.ajax({
url: createURL("listVirtualMachines"),
dataType: "json",
data: {
networkid: networks[i].id,
listAll: true
},
async: false,
success: function(json) {
networks[i].virtualMachines = json.listvirtualmachinesresponse.virtualmachine;
}
});
}
}
args.response.success({ tiers: networks });
}
});
}
}
};
}(jQuery, cloudStack));
| ui/scripts/vpc.js | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
(function($, cloudStack) {
var aclMultiEdit = {
noSelect: true,
fieldPreFilter: function(args) {
var context = args.context;
var hiddenFields = [];
if (context.networks) { // from tier detail view
hiddenFields.push('networkid');
}
return hiddenFields; // Returns fields to be hidden
},
fields: {
'cidrlist': { edit: true, label: 'label.cidr' },
'protocol': {
label: 'label.protocol',
select: function(args) {
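        // Toggle field visibility based on the chosen protocol: ICMP type/code for 'icmp',
        // the protocol-number field for 'protocolnumber', and the port fields otherwise.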
args.$select.change(function() {
var $inputs = args.$form.find('input');
var $icmpFields = $inputs.filter(function() {
var name = $(this).attr('name');
return $.inArray(name, [
'icmptype',
'icmpcode'
]) > -1;
});
var $otherFields = $inputs.filter(function() {
var name = $(this).attr('name');
return name != 'icmptype' && name != 'icmpcode' && name != 'cidrlist';
});
var $protocolinput = args.$form.find('th,td');
var $protocolFields = $protocolinput.filter(function(){
var name = $(this).attr('rel');
return $.inArray(name,['protocolnumber']) > -1;
});
if($(this).val() == 'protocolnumber' ){
$protocolFields.show();
}
else{
$protocolFields.hide();
}
if ($(this).val() == 'icmp') {
$icmpFields.show();
$icmpFields.attr('disabled', false);
$otherFields.attr('disabled', 'disabled');
$otherFields.hide();
$otherFields.parent().find('label.error').hide();
} else {
$otherFields.show();
$otherFields.parent().find('label.error').hide();
$otherFields.attr('disabled', false);
$icmpFields.attr('disabled', 'disabled');
$icmpFields.hide();
$icmpFields.parent().find('label.error').hide();
}
});
args.response.success({
data: [
{ name: 'tcp', description: 'TCP' },
{ name: 'udp', description: 'UDP' },
{ name: 'icmp', description: 'ICMP' },
{ name: 'all', description: 'ALL'},
{ name: 'protocolnumber', description: 'Protocol Number'}
]
});
}
},
'protocolnumber': {label:'Protocol Number',isDisabled:true,isHidden:true,edit:true},
'startport': { edit: true, label: 'label.start.port' },
'endport': { edit: true, label: 'label.end.port' },
'networkid': {
label: 'Select Tier',
select: function(args) {
var data = {
listAll: true,
vpcid: args.context.vpc[0].id
};
// Only show selected tier, if viewing from detail view
if (args.context.networks &&
args.context.networks[0] &&
args.context.networks[0].vpcid) {
$.extend(data, {
id: args.context.networks[0].id
});
}
// Ajax Call to display the Tiers
$.ajax({
url: createURL('listNetworks'),
data: data,
success: function(json) {
var networks = json.listnetworksresponse.network;
args.response.success({
data: $(networks).map(function(index, network) {
return {
name: network.id,
description: network.name
};
})
});
}
});
}
},
'icmptype': { edit: true, label: 'ICMP.type', isDisabled: true, desc:'Please specify -1 if you want to allow all ICMP types', defaultValue:'-1' },
'icmpcode': { edit: true, label: 'ICMP.code', isDisabled: true, desc:'Please specify -1 if you want to allow all ICMP codes', defaultValue:'-1' },
'traffictype' : {
label: 'label.traffic.type',
select: function(args) {
args.response.success({
data: [
{ name: 'Ingress', description: 'Ingress' },
{ name: 'Egress', description: 'Egress' }
]
});
}
},
'add-rule': {
label: 'label.add.rule',
addButton: true
}
},
tags: cloudStack.api.tags({ resourceType: 'NetworkACL', contextId: 'multiRule' }),
add: {
label: 'label.add',
action: function(args) {
var $multi = args.$multi;
//Support for Protocol Number between 0 to 255
if(args.data.protocol == 'protocolnumber'){
$.extend(args.data,{protocol:args.data.protocolnumber});
delete args.data.protocolnumber;
}
else
delete args.data.protocolnumber;
$.ajax({
url: createURL('createNetworkACL'),
data: $.extend(args.data, {
networkid: args.context.networks ?
args.context.networks[0].id : args.data.networkid
}),
dataType: 'json',
success: function(data) {
args.response.success({
_custom: {
jobId: data.createnetworkaclresponse.jobid,
getUpdatedItem: function(json) {
var networkName = $multi.find('select[name=networkid] option[value=' + args.data.networkid + ']').html();
var data = $.extend(json.queryasyncjobresultresponse.jobresult.networkacl, {
networkid: networkName
});
var aclRules = $multi.data('acl-rules');
aclRules.push(data);
$multi.data('acl-rules', aclRules);
$(window).trigger('cloudStack.fullRefresh');
return data;
}
},
notification: {
label: 'label.add.ACL',
poll: pollAsyncJobResult
}
});
},
error: function(data) {
args.response.error(parseXMLHttpResponse(data));
}
});
}
},
actions: {
destroy: {
label: 'label.remove.ACL',
action: function(args) {
$.ajax({
url: createURL('deleteNetworkACL'),
data: {
id: args.context.multiRule[0].id
},
dataType: 'json',
async: true,
success: function(data) {
var jobID = data.deletenetworkaclresponse.jobid;
args.response.success({
_custom: {
jobId: jobID,
getUpdatedItem: function() {
$(window).trigger('cloudStack.fullRefresh');
}
},
notification: {
label: 'label.remove.ACL',
poll: pollAsyncJobResult
}
});
},
error: function(data) {
args.response.error(parseXMLHttpResponse(data));
}
});
}
}
},
dataProvider: function(args) {
var $multi = args.$multi;
var data = {
vpcid: args.context.vpc[0].id,
listAll: true
};
if (!$multi.data('acl-rules')) {
$multi.data('acl-rules', []);
}
if (args.context.networks &&
args.context.networks[0] &&
args.context.networks[0].vpcid) {
data.networkid = args.context.networks[0].id;
$.ajax({
url: createURL('listNetworkACLs'),
data: data,
dataType: 'json',
async: true,
success: function(json) {
args.response.success({
data: $(json.listnetworkaclsresponse.networkacl).map(function(index, acl) {
return $.extend(acl, {
networkid: args.context.networks[0].name
});
})
});
},
error: function(XMLHttpResponse) {
args.response.error(parseXMLHttpResponse(XMLHttpResponse));
}
});
} else {
args.response.success({ data: $multi.data('acl-rules') });
}
}
};
cloudStack.vpc = {
routerDetailView: function() {
return {
title: 'VPC router details',
updateContext: function(args) {
var router;
$.ajax({
url: createURL("listRouters&listAll=true&vpcid=" +args.context.vpc[0].id),
dataType: "json",
async: false,
success: function(json) {
router = json.listroutersresponse.router[0];
}
});
return {
routers: [router]
};
},
actions: cloudStack.sections.system.subsections.virtualRouters
.listView.detailView.actions,
tabs: {
routerDetails: cloudStack.sections.network.sections.vpc
.listView.detailView.tabs.router
}
};
},
vmListView: {
id: 'vpcTierInstances',
listView: {
filters: {
all: { label: 'label.menu.all.instances' },
running: { label: 'label.menu.running.instances' },
stopped: { label: 'label.menu.stopped.instances' },
destroyed: { label: 'label.menu.destroyed.instances' }
},
fields: {
name: { label: 'label.name', editable: true },
account: { label: 'label.account' },
zonename: { label: 'label.zone' },
state: {
label: 'label.status',
indicator: {
'Running': 'on',
'Stopped': 'off',
'Destroyed': 'off'
}
}
},
// List view actions
actions: {
start: {
label: 'label.action.start.instance' ,
action: function(args) {
$.ajax({
url: createURL("startVirtualMachine&id=" + args.context.vpcTierInstances[0].id),
dataType: "json",
async: true,
success: function(json) {
var jid = json.startvirtualmachineresponse.jobid;
args.response.success(
{_custom:
{jobId: jid,
getUpdatedItem: function(json) {
return json.queryasyncjobresultresponse.jobresult.virtualmachine;
},
getActionFilter: function() {
return cloudStack.actionFilter.vmActionFilter;
}
}
}
);
}
});
},
messages: {
confirm: function(args) {
return 'message.action.start.instance';
},
notification: function(args) {
return 'label.action.start.instance';
},
complete: function(args) {
if(args.password != null) {
alert('Password of the VM is ' + args.password);
}
return 'label.action.start.instance';
}
},
notification: {
poll: pollAsyncJobResult
}
},
stop: {
label: 'label.action.stop.instance',
addRow: 'false',
createForm: {
title: 'label.action.stop.instance',
desc: 'message.action.stop.instance',
fields: {
forced: {
label: 'force.stop',
isBoolean: true,
isChecked: false
}
}
},
action: function(args) {
var array1 = [];
array1.push("&forced=" + (args.data.forced == "on"));
$.ajax({
url: createURL("stopVirtualMachine&id=" + args.context.vpcTierInstances[0].id + array1.join("")),
dataType: "json",
async: true,
success: function(json) {
var jid = json.stopvirtualmachineresponse.jobid;
args.response.success(
{_custom:
{jobId: jid,
getUpdatedItem: function(json) {
return json.queryasyncjobresultresponse.jobresult.virtualmachine;
},
getActionFilter: function() {
return cloudStack.actionFilter.vmActionFilter;
}
}
}
);
}
});
},
messages: {
confirm: function(args) {
return 'message.action.stop.instance';
},
notification: function(args) {
return 'label.action.stop.instance';
}
},
notification: {
poll: pollAsyncJobResult
}
},
restart: {
label: 'instances.actions.reboot.label',
action: function(args) {
$.ajax({
url: createURL("rebootVirtualMachine&id=" + args.context.vpcTierInstances[0].id),
dataType: "json",
async: true,
success: function(json) {
var jid = json.rebootvirtualmachineresponse.jobid;
args.response.success(
{_custom:
{jobId: jid,
getUpdatedItem: function(json) {
return json.queryasyncjobresultresponse.jobresult.virtualmachine;
},
getActionFilter: function() {
return cloudStack.actionFilter.vmActionFilter;
}
}
}
);
}
});
},
messages: {
confirm: function(args) {
return 'message.action.reboot.instance';
},
notification: function(args) {
return 'instances.actions.reboot.label';
}
},
notification: {
poll: pollAsyncJobResult
}
},
destroy: {
label: 'label.action.destroy.instance',
messages: {
confirm: function(args) {
return 'message.action.destroy.instance';
},
notification: function(args) {
return 'label.action.destroy.instance';
}
},
action: function(args) {
$.ajax({
url: createURL("destroyVirtualMachine&id=" + args.context.vpcTierInstances[0].id),
dataType: "json",
async: true,
success: function(json) {
var jid = json.destroyvirtualmachineresponse.jobid;
args.response.success(
{_custom:
{jobId: jid,
getUpdatedItem: function(json) {
return json.queryasyncjobresultresponse.jobresult.virtualmachine;
},
getActionFilter: function() {
return cloudStack.actionFilter.vmActionFilter;
}
}
}
);
}
});
},
notification: {
poll: pollAsyncJobResult
}
},
restore: {
label: 'label.action.restore.instance',
messages: {
confirm: function(args) {
return 'message.action.restore.instance';
},
notification: function(args) {
return 'label.action.restore.instance';
}
},
action: function(args) {
$.ajax({
url: createURL("recoverVirtualMachine&id=" + args.context.vpcTierInstances[0].id),
dataType: "json",
async: true,
success: function(json) {
var item = json.recovervirtualmachineresponse.virtualmachine;
args.response.success({data:item});
}
});
}
},
viewConsole: {
label: 'label.view.console',
action: {
externalLink: {
url: function(args) {
return clientConsoleUrl + '?cmd=access&vm=' + args.context.vpcTierInstances[0].id;
},
title: function(args) {
                  return args.context.vpcTierInstances[0].id.substr(0,8); //the title passed to window.open() cannot contain spaces or exceed 8 characters, otherwise IE throws an error.
},
width: 820,
height: 640
}
}
}
},
dataProvider: function(args) {
var array1 = [];
if(args.filterBy != null) {
if(args.filterBy.kind != null) {
switch(args.filterBy.kind) {
case "all":
array1.push("&listAll=true");
break;
case "mine":
if (!args.context.projects) array1.push("&domainid=" + g_domainid + "&account=" + g_account);
break;
case "running":
array1.push("&listAll=true&state=Running");
break;
case "stopped":
array1.push("&listAll=true&state=Stopped");
break;
case "destroyed":
array1.push("&listAll=true&state=Destroyed");
break;
}
}
if(args.filterBy.search != null && args.filterBy.search.by != null && args.filterBy.search.value != null) {
switch(args.filterBy.search.by) {
case "name":
if(args.filterBy.search.value.length > 0)
array1.push("&keyword=" + args.filterBy.search.value);
break;
}
}
}
$.ajax({
url: createURL('listVirtualMachines' + array1.join("")),
data: {
networkid: args.context.networks[0].id
},
success: function(json) {
args.response.success({
data: json.listvirtualmachinesresponse.virtualmachine,
actionFilter: cloudStack.actionFilter.vmActionFilter
});
}
});
}
}
},
ipAddresses: {
listView: function() {
var listView = $.extend(true, {}, cloudStack.sections.network.sections.ipAddresses);
listView.listView.fields = {
ipaddress: listView.listView.fields.ipaddress,
zonename: listView.listView.fields.zonename,
associatednetworkname: { label: 'label.network.name' },
state: listView.listView.fields.state
};
return listView;
}
},
acl: {
multiEdit: aclMultiEdit,
listView: {
listView: {
id: 'networks',
fields: {
tierName: { label: 'label.tier' },
aclTotal: { label: 'label.network.ACL.total' }
},
dataProvider: function(args) {
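            // For each tier in the VPC, count its ACL rules with a synchronous call so the
            // total can be attached to the row before the list renders.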
$.ajax({
url: createURL('listNetworks'),
data: {
listAll: true,
vpcid: args.context.vpc[0].id
},
success: function(json) {
var networks = json.listnetworksresponse.network ?
json.listnetworksresponse.network : [];
args.response.success({
data: $.map(networks, function(tier) {
var aclTotal = 0;
// Get ACL total
$.ajax({
url: createURL('listNetworkACLs'),
async: false,
data: {
listAll: true,
networkid: tier.id
},
success: function(json) {
aclTotal = json.listnetworkaclsresponse.networkacl ?
json.listnetworkaclsresponse.networkacl.length : 0;
}
});
return $.extend(tier, {
tierName: tier.name,
aclTotal: aclTotal
});
})
});
}
});
}
}
}
},
gateways: {
add: {
preCheck: function(args) {
if(isAdmin()) { //root-admin
var items;
$.ajax({
url: createURL('listPrivateGateways'),
async: false,
data: {
vpcid: args.context.vpc[0].id,
listAll: true
},
success: function(json) {
items = json.listprivategatewaysresponse.privategateway;
}
});
if (items && items.length) {
return true; //show private gateway listView
}
else {
return false; //show create private gateway dialog
}
}
else { //regular-user, domain-admin
return true; //show private gateway listView instead of create private gateway dialog because only root-admin is allowed to create private gateway
}
},
label: 'label.add.new.gateway',
messages: {
notification: function(args) {
return 'label.add.new.gateway';
}
},
createForm: {
title: 'label.add.new.gateway',
desc: 'message.add.new.gateway.to.vpc',
fields: {
physicalnetworkid: {
docID: 'helpVPCGatewayPhysicalNetwork',
label: 'label.physical.network',
select: function(args) {
$.ajax({
url: createURL("listPhysicalNetworks"),
data: {
zoneid: args.context.vpc[0].zoneid
},
success: function(json) {
var objs = json.listphysicalnetworksresponse.physicalnetwork;
var items = [];
$(objs).each(function() {
items.push({id: this.id, description: this.name});
});
args.response.success({data: items});
}
});
}
},
vlan: {
label: 'label.vlan', validation: { required: true },
docID: 'helpVPCGatewayVLAN'
},
ipaddress: {
label: 'label.ip.address', validation: { required: true },
docID: 'helpVPCGatewayIP'
},
gateway: {
label: 'label.gateway', validation: { required: true },
docID: 'helpVPCGatewayGateway'
},
netmask: {
label: 'label.netmask', validation: { required: true },
docID: 'helpVPCGatewayNetmask'
},
sourceNat:{
label:'Source NAT',
isBoolean:true,
isChecked:false
},
aclid:{
label:'ACL',
select:function(args){
$.ajax({
url: createURL('listNetworkACLLists'),
dataType: 'json',
async: true,
success: function(json) {
var objs = json.listnetworkacllistsresponse.networkacllist;
var items = [];
$(objs).each(function() {
if(this.name == "default_deny")
items.unshift({id:this.id,description:this.name});
else
items.push({id: this.id, description: this.name});
});
args.response.success({data: items});
}
});
}
}
}
},
action: function(args) {
var array1=[];
if(args.$form.find('.form-item[rel=sourceNat]').find('input[type=checkbox]').is(':Checked')== true) {
array1.push("&sourcenatsupported=true");
}
else
array1.push("&sourcenatsupported=false");
$.ajax({
url: createURL('createPrivateGateway'+ array1.join("")),
data: {
physicalnetworkid: args.data.physicalnetworkid,
vpcid: args.context.vpc[0].id,
ipaddress: args.data.ipaddress,
gateway: args.data.gateway,
netmask: args.data.netmask,
vlan: args.data.vlan,
aclid:args.data.aclid
},
success: function(json) {
var jid = json.createprivategatewayresponse.jobid;
args.response.success(
{_custom:
{jobId: jid,
getUpdatedItem: function(json) {
return json.queryasyncjobresultresponse.jobresult.privategateway;
}
}
}
);
},
error: function(json) {
args.response.error(parseXMLHttpResponse(json));
}
});
},
notification: {
poll: pollAsyncJobResult
}
},
listView: function() {
return {
listView: {
id: 'vpcGateways',
fields: {
ipaddress: { label: 'label.ip.address', validation: { required: true }},
gateway: { label: 'label.gateway', validation: { required: true }},
netmask: { label: 'label.netmask', validation: { required: true }},
vlan: { label: 'label.vlan', validation: { required: true }}
},
actions:{
add:{
label:'Add Private Gateway',
preFilter: function(args) {
if(isAdmin() || isDomainAdmin() )
return true;
else
return false;
},
createForm:{
title: 'label.add.new.gateway',
desc: 'message.add.new.gateway.to.vpc',
fields: {
physicalnetworkid: {
docID: 'helpVPCGatewayPhysicalNetwork',
label: 'label.physical.network',
select: function(args) {
$.ajax({
url: createURL("listPhysicalNetworks"),
data: {
zoneid: args.context.vpc[0].zoneid
},
success: function(json) {
var objs = json.listphysicalnetworksresponse.physicalnetwork;
var items = [];
$(objs).each(function() {
items.push({id: this.id, description: this.name});
});
args.response.success({data: items});
}
});
}
},
vlan: {
label: 'label.vlan', validation: { required: true },
docID: 'helpVPCGatewayVLAN'
},
ipaddress: {
label: 'label.ip.address', validation: { required: true },
docID: 'helpVPCGatewayIP'
},
gateway: {
label: 'label.gateway', validation: { required: true },
docID: 'helpVPCGatewayGateway'
},
netmask: {
label: 'label.netmask', validation: { required: true },
docID: 'helpVPCGatewayNetmask'
},
sourceNat:{
label:'Source NAT',
isBoolean:true,
isChecked:false
},
aclid:{
label:'ACL',
select:function(args){
$.ajax({
url: createURL('listNetworkACLLists'),
dataType: 'json',
async: true,
success: function(json) {
var objs = json.listnetworkacllistsresponse.networkacllist;
var items = [];
$(objs).each(function() {
if(this.name == "default_deny")
items.unshift({id:this.id,description:this.name});
else
items.push({id: this.id, description: this.name});
});
args.response.success({data: items});
}
});
}
}
}
},
action:function(args){
var array1=[];
if(args.$form.find('.form-item[rel=sourceNat]').find('input[type=checkbox]').is(':Checked')== true) {
array1.push("&sourcenatsupported=true");
}
else
array1.push("&sourcenatsupported=false");
$.ajax({
url: createURL('createPrivateGateway'+ array1.join("")),
data: {
physicalnetworkid: args.data.physicalnetworkid,
vpcid: args.context.vpc[0].id,
ipaddress: args.data.ipaddress,
gateway: args.data.gateway,
netmask: args.data.netmask,
vlan: args.data.vlan,
aclid:args.data.aclid
},
success: function(json) {
var jid = json.createprivategatewayresponse.jobid;
args.response.success(
{_custom:
{jobId: jid,
getUpdatedItem: function(json) {
return json.queryasyncjobresultresponse.jobresult.privategateway;
}
}
}
);
},
error: function(json) {
args.response.error(parseXMLHttpResponse(json));
}
});
},
notification: {
poll: pollAsyncJobResult
}
}
},
dataProvider: function(args) {
$.ajax({
url: createURL('listPrivateGateways'),
data: {
vpcid: args.context.vpc[0].id,
listAll: true
},
success: function(json) {
var items = json.listprivategatewaysresponse.privategateway;
args.response.success({ data: items });
}
});
},
detailView: {
name: 'label.details',
actions: {
remove: {
label: 'label.delete.gateway',
messages: {
confirm: function(args) {
return 'message.delete.gateway';
},
notification: function(args) {
return 'label.delete.gateway';
}
},
action: function(args) {
$.ajax({
url: createURL("deletePrivateGateway&id=" + args.context.vpcGateways[0].id),
dataType: "json",
async: true,
success: function(json) {
var jid = json.deleteprivategatewayresponse.jobid;
args.response.success(
{_custom:
{
jobId: jid
}
}
);
}
});
},
notification: {
poll: pollAsyncJobResult
}
},
replaceACL:{
label:'Replace ACL',
createForm:{
title:'Replace ACL',
label:'Replace ACL',
fields:{
aclid:{
label:'ACL',
select:function(args){
$.ajax({
url: createURL('listNetworkACLLists'),
dataType: 'json',
async: true,
success: function(json) {
var objs = json.listnetworkacllistsresponse.networkacllist;
var items = [];
$(objs).each(function() {
items.push({id: this.id, description: this.name});
});
args.response.success({data: items});
}
});
}
}
}
},
action: function(args) {
$.ajax({
url: createURL("replaceNetworkACLList&gatewayid=" + args.context.vpcGateways[0].id + "&aclid=" + args.data.aclid ),
dataType: "json",
success: function(json) {
var jid = json.replacenetworkacllistresponse.jobid;
args.response.success(
{_custom:
{
jobId: jid,
getUpdatedItem: function(json) {
var item = json.queryasyncjobresultresponse.jobresult.aclid;
return {data:item};
}
}
}
)
},
error:function(json){
args.response.error(parseXMLHttpResponse(json));
}
});
},
notification: {
poll: pollAsyncJobResult
},
messages: {
confirm: function(args) {
                    return 'Do you want to replace the ACL with a new one?';
},
notification: function(args) {
return 'ACL replaced';
}
}
}
},
tabs: {
details: {
title: 'label.details',
fields: [
{
ipaddress: { label: 'label.ip.address' }
},
{
gateway: { label: 'label.gateway' },
netmask: { label: 'label.netmask'},
vlan: { label: 'label.vlan' },
state: { label: 'label.state' },
id: { label: 'label.id' },
zonename: { label: 'label.zone' },
domain: { label: 'label.domain' },
account: { label: 'label.account' },
sourcenatsupported:{
label: 'SourceNAT Supported' ,
converter: function(str) {
return str ? 'Yes' : 'No';
}
}
}
],
dataProvider: function(args) {
$.ajax({
url: createURL('listPrivateGateways'),
data: {
id: args.context.vpcGateways[0].id,
listAll: true
},
success: function(json) {
var item = json.listprivategatewaysresponse.privategateway[0];
args.response.success({
data: item,
actionFilter: function(args) {
var allowedActions = [];
if(isAdmin()) {
allowedActions.push("remove");
allowedActions.push("replaceACL");
}
return allowedActions;
}
});
}
});
}
},
staticRoutes: {
title: 'Static Routes',
custom: function(args) {
return $('<div>').multiEdit({
noSelect: true,
context: args.context,
fields: {
cidr: { edit: true, label: 'label.CIDR.of.destination.network' },
'add-rule': {
label: 'label.add.route',
addButton: true
}
},
tags: cloudStack.api.tags({ resourceType: 'StaticRoute', contextId: 'multiRule' }),
add: {
label: 'label.add',
action: function(args) {
$.ajax({
url: createURL('createStaticRoute'),
data: {
gatewayid: args.context.vpcGateways[0].id,
cidr: args.data.cidr
},
success: function(data) {
args.response.success({
_custom: {
jobId: data.createstaticrouteresponse.jobid
},
notification: {
label: 'label.add.static.route',
poll: pollAsyncJobResult
}
});
},
error: function(data) {
args.response.error(parseXMLHttpResponse(data));
}
});
}
},
actions: {
destroy: {
label: 'label.remove.static.route',
action: function(args) {
$.ajax({
url: createURL('deleteStaticRoute'),
data: {
id: args.context.multiRule[0].id
},
dataType: 'json',
async: true,
success: function(data) {
var jobID = data.deletestaticrouteresponse.jobid;
args.response.success({
_custom: {
jobId: jobID
},
notification: {
label: 'label.remove.static.route',
poll: pollAsyncJobResult
}
});
}
});
}
}
},
dataProvider: function(args) {
$.ajax({
url: createURL('listStaticRoutes'),
data: {
gatewayid: args.context.vpcGateways[0].id,
listAll: true
},
success: function(json) {
var items = json.liststaticroutesresponse.staticroute;
args.response.success({ data: items });
}
});
}
});
}
}
}
}
}
};
}
},
siteToSiteVPN: {
title: 'label.site.to.site.VPN',
id: 'siteToSiteVpn',
sectionSelect: {
preFilter: function(args) {
return ["vpnGateway", "vpnConnection"];
},
label: 'label.select-view'
},
// This is a custom add function -- does not show in list view
add: {
// Check if VPN gateways exist
// -- if false, don't show list view
preCheck: function(args) {
var items;
$.ajax({
url: createURL('listVpnGateways&listAll=true'),
data: {
vpcid: args.context.vpc[0].id
},
async: false,
success: function(json) {
items = json.listvpngatewaysresponse.vpngateway;
}
});
if (items && items.length) {
return true;
}
return false;
},
label: 'label.add.VPN.gateway',
messages: {
notification: function(args) {
return 'label.add.VPN.gateway';
}
},
createForm: {
title: 'label.add.VPN.gateway',
desc: 'message.add.VPN.gateway',
fields: {}
},
action: function(args) {
$.ajax({
url: createURL("createVpnGateway"),
data: {
vpcid: args.context.vpc[0].id
},
success: function(json) {
var jid = json.createvpngatewayresponse.jobid;
args.response.success(
{_custom:
{
jobId: jid,
getUpdatedItem: function(json) {
return json.queryasyncjobresultresponse.jobresult.vpngateway;
}
}
}
);
}
});
},
notification: {
poll: pollAsyncJobResult
}
},
sections: {
vpnGateway: {
type: 'select',
title: 'label.VPN.gateway',
listView: {
id: 'vpnGateway',
label: 'label.VPN.gateway',
fields: {
publicip: { label: 'label.ip.address' },
account: { label: 'label.account' },
domain: { label: 'label.domain' }
},
dataProvider: function(args) {
var array1 = [];
if(args.filterBy != null) {
if(args.filterBy.search != null && args.filterBy.search.by != null && args.filterBy.search.value != null) {
switch(args.filterBy.search.by) {
case "name":
if(args.filterBy.search.value.length > 0)
array1.push("&keyword=" + args.filterBy.search.value);
break;
}
}
}
$.ajax({
url: createURL("listVpnGateways&listAll=true&page=" + args.page + "&pagesize=" + pageSize + array1.join("")),
data: {
vpcid: args.context.vpc[0].id
},
async: false,
success: function(json) {
var items = json.listvpngatewaysresponse.vpngateway;
args.response.success({data: items});
}
});
},
detailView: {
name: 'label.details',
actions: {
remove: {
label: 'label.delete.VPN.gateway',
messages: {
confirm: function(args) {
return 'message.delete.VPN.gateway';
},
notification: function(args) {
return 'label.delete.VPN.gateway';
}
},
action: function(args) {
$.ajax({
url: createURL("deleteVpnGateway"),
data: {
id: args.context.vpnGateway[0].id
},
success: function(json) {
var jid = json.deletevpngatewayresponse.jobid;
args.response.success(
{_custom:
{
jobId: jid
}
}
);
}
});
},
notification: {
poll: pollAsyncJobResult
}
}
},
tabs: {
details: {
title: 'label.details',
fields: [
{
publicip: { label: 'label.ip.address' }
},
{
id: { label: 'label.id' },
domain: { label: 'label.domain' },
account: { label: 'label.account' }
}
],
dataProvider: function(args) {
$.ajax({
url: createURL("listVpnGateways"),
data: {
id: args.context.vpnGateway[0].id
},
async: true,
success: function(json) {
var item = json.listvpngatewaysresponse.vpngateway[0];
args.response.success({data: item});
}
});
}
}
}
}
}
},
vpnConnection: {
type: 'select',
title: 'label.VPN.connection',
listView: {
id: 'vpnConnection',
label: 'label.VPN.connection',
fields: {
publicip: { label: 'label.ip.address' },
gateway: { label: 'label.gateway' },
state: {
label: 'label.state',
indicator: {
'Connected': 'on',
'Disconnected': 'off',
'Error': 'off'
}
},
ipsecpsk: { label: 'label.IPsec.preshared.key' },
ikepolicy: { label: 'label.IKE.policy' },
esppolicy: { label: 'label.ESP.policy' }
},
dataProvider: function(args) {
var array1 = [];
if(args.filterBy != null) {
if(args.filterBy.search != null && args.filterBy.search.by != null && args.filterBy.search.value != null) {
switch(args.filterBy.search.by) {
case "name":
if(args.filterBy.search.value.length > 0)
array1.push("&keyword=" + args.filterBy.search.value);
break;
}
}
}
$.ajax({
url: createURL("listVpnConnections&listAll=true&page=" + args.page + "&pagesize=" + pageSize + array1.join("")),
data: {
vpcid: args.context.vpc[0].id
},
success: function(json) {
var items = json.listvpnconnectionsresponse.vpnconnection;
args.response.success({data:items});
}
});
},
actions:{
add: {
label: 'label.create.VPN.connection',
messages: {
notification: function(args) {
return 'label.create.VPN.connection';
}
},
createForm: {
title: 'label.create.VPN.connection',
fields: {
vpncustomergatewayid: {
label: 'label.VPN.customer.gateway',
validation: { required: true },
select: function(args) {
$.ajax({
url: createURL("listVpnCustomerGateways"),
data: {
listAll: true
},
success: function(json) {
var items = json.listvpncustomergatewaysresponse.vpncustomergateway;
args.response.success({
data: $.map(items, function(item) {
return {
id: item.id,
description: item.name
};
})
});
}
});
}
}
}
},
action: function(args) {
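                    // Creating a VPN connection requires an existing VPN gateway on this VPC;
                    // look it up first and abort with an error message if none is found.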
var vpngatewayid = null;
$.ajax({
url: createURL('listVpnGateways'),
data: {
vpcid: args.context.vpc[0].id
},
async: false,
success: function(json) {
var items = json.listvpngatewaysresponse.vpngateway;
if(items != null && items.length > 0) {
vpngatewayid = items[0].id;
}
}
});
if(vpngatewayid == null) {
args.response.error('The selected VPC does not have a VPN gateway. Please create a VPN gateway for the VPC first.');
return;
}
$.ajax({
url: createURL('createVpnConnection'),
data: {
s2svpngatewayid: vpngatewayid,
s2scustomergatewayid: args.data.vpncustomergatewayid
},
success: function(json) {
var jid = json.createvpnconnectionresponse.jobid;
args.response.success(
{_custom:
{
jobId: jid,
getUpdatedItem: function(json) {
return json.queryasyncjobresultresponse.jobresult.vpnconnection;
}
}
}
);
},
error: function(xmlHttpResponse) {
args.response.error(parseXMLHttpResponse(xmlHttpResponse));
}
});
},
notification: {
poll: pollAsyncJobResult
}
}
},
detailView: {
name: 'label.details',
tabs: {
details: {
title: 'label.details',
fields: [
{
id: { label: 'label.id' },
//s2svpngatewayid: { label: 'VPN gateway ID' },
publicip: { label: 'label.ip.address' },
//s2scustomergatewayid: { label: 'Customer gateway ID' },
gateway: { label: 'label.gateway' },
cidrlist: { label: 'label.CIDR.list' },
ipsecpsk: { label: 'label.IPsec.preshared.key' },
ikepolicy: { label: 'label.IKE.policy' },
esppolicy: { label: 'label.ESP.policy' },
ikelifetime: { label: 'label.IKE.lifetime' },
esplifetime: {label: 'label.ESP.lifetime' },
dpd: {
label: 'label.dead.peer.detection',
converter: function(str) {
return str ? 'Yes' : 'No';
}
},
state: {label: 'label.state' },
created: { label: 'label.date', converter: cloudStack.converters.toLocalDate }
}
],
dataProvider: function(args) {
$.ajax({
url: createURL("listVpnConnections&id=" + args.context.vpnConnection[0].id),
dataType: "json",
async: true,
success: function(json) {
var item = json.listvpnconnectionsresponse.vpnconnection[0];
args.response.success({data: item});
}
});
}
}
},
actions: {
restart: {
label: 'label.reset.VPN.connection',
messages: {
confirm: function(args) {
return 'message.reset.VPN.connection' ;
},
notification: function(args) {
return 'label.reset.VPN.connection';
}
},
action: function(args) {
$.ajax({
url: createURL("resetVpnConnection"),
data: {
id: args.context.vpnConnection[0].id
},
dataType: "json",
async: true,
success: function(json) {
var jid = json.resetvpnconnectionresponse.jobid;
args.response.success(
{_custom:
{
jobId: jid,
getUpdatedItem: function(json) {
return json.queryasyncjobresultresponse.jobresult.vpnconnection;
}
}
}
);
}
});
},
notification: {
poll: pollAsyncJobResult
}
},
remove: {
label: 'label.delete.VPN.connection',
messages: {
confirm: function(args) {
return 'message.delete.VPN.connection';
},
notification: function(args) {
return 'label.delete.VPN.connection';
}
},
action: function(args) {
$.ajax({
url: createURL("deleteVpnConnection"),
dataType: "json",
data: {
id: args.context.vpnConnection[0].id
},
async: true,
success: function(json) {
var jid = json.deletevpnconnectionresponse.jobid;
args.response.success(
{_custom:
{
jobId: jid,
getUpdatedItem: function(json) {
return json.queryasyncjobresultresponse.jobresult.vpnconnection;
}
}
}
);
}
});
},
notification: {
poll: pollAsyncJobResult
}
}
}
}
}
}
}
},
tiers: {
detailView: { //duplicate from cloudStack.sections.network.sections.networks.listView.detailView (begin)
name: 'Guest network details',
viewAll: {
path: 'network.ipAddresses',
label: 'label.menu.ipaddresses',
preFilter: function(args) {
if (args.context.networks[0].state == 'Destroyed')
return false;
var services = args.context.networks[0].service;
if(services == null)
return false;
if(args.context.networks[0].type == "Isolated") {
for(var i=0; i < services.length; i++) {
var service = services[i];
if(service.name == "SourceNat") {
return true;
}
}
}
else if(args.context.networks[0].type == "Shared") {
var havingSecurityGroupService = false;
var havingElasticIpCapability = false;
var havingElasticLbCapability = false;
for(var i=0; i < services.length; i++) {
var service = services[i];
if(service.name == "SecurityGroup") {
havingSecurityGroupService = true;
}
else if(service.name == "StaticNat") {
$(service.capability).each(function(){
if(this.name == "ElasticIp" && this.value == "true") {
havingElasticIpCapability = true;
return false; //break $.each() loop
}
});
}
else if(service.name == "Lb") {
$(service.capability).each(function(){
if(this.name == "ElasticLb" && this.value == "true") {
havingElasticLbCapability = true;
return false; //break $.each() loop
}
});
}
}
if(havingSecurityGroupService == true && havingElasticIpCapability == true && havingElasticLbCapability == true)
return true;
else
return false;
}
return false;
}
},
actions: {
edit: {
label: 'label.edit',
messages: {
notification: function(args) {
return 'label.edit.network.details';
}
},
action: function(args) {
var array1 = [];
array1.push("&name=" + todb(args.data.name));
array1.push("&displaytext=" + todb(args.data.displaytext));
//args.data.networkdomain is null when networkdomain field is hidden
if(args.data.networkdomain != null && args.data.networkdomain != args.context.networks[0].networkdomain)
array1.push("&networkdomain=" + todb(args.data.networkdomain));
//args.data.networkofferingid is null when networkofferingid field is hidden
if(args.data.networkofferingid != null && args.data.networkofferingid != args.context.networks[0].networkofferingid) {
array1.push("&networkofferingid=" + todb(args.data.networkofferingid));
if(args.context.networks[0].type == "Isolated") { //Isolated network
cloudStack.dialog.confirm({
message: 'Do you want to keep the current guest network CIDR unchanged?',
action: function() { //"Yes" button is clicked
array1.push("&changecidr=false");
$.ajax({
url: createURL("updateNetwork&id=" + args.context.networks[0].id + array1.join("")),
dataType: "json",
success: function(json) {
var jid = json.updatenetworkresponse.jobid;
args.response.success(
{_custom:
{jobId: jid,
getUpdatedItem: function(json) {
var item = json.queryasyncjobresultresponse.jobresult.network;
return {data: item};
}
}
}
);
},
error:function(json) {
args.response.error(parseXMLHttpResponse(json));
}
});
},
cancelAction: function() { //"Cancel" button is clicked
array1.push("&changecidr=true");
$.ajax({
url: createURL("updateNetwork&id=" + args.context.networks[0].id + array1.join("")),
dataType: "json",
success: function(json) {
var jid = json.updatenetworkresponse.jobid;
args.response.success(
{_custom:
{jobId: jid,
getUpdatedItem: function(json) {
var item = json.queryasyncjobresultresponse.jobresult.network;
return {data: item};
}
}
}
);
},
error:function(json) {
args.response.error(parseXMLHttpResponse(json));
}
});
}
});
return;
}
}
$.ajax({
url: createURL("updateNetwork&id=" + args.context.networks[0].id + array1.join("")),
dataType: "json",
success: function(json) {
var jid = json.updatenetworkresponse.jobid;
args.response.success(
{_custom:
{jobId: jid,
getUpdatedItem: function(json) {
var item = json.queryasyncjobresultresponse.jobresult.network;
return {data: item};
}
}
}
);
},
error:function(json) {
args.response.error(parseXMLHttpResponse(json));
}
});
},
notification: {
poll: pollAsyncJobResult
}
},
'restart': {
label: 'label.restart.network',
createForm: {
title: 'label.restart.network',
desc: 'message.restart.network',
preFilter: function(args) {
var zoneObj;
$.ajax({
url: createURL("listZones&id=" + args.context.networks[0].zoneid),
dataType: "json",
async: false,
success: function(json){
zoneObj = json.listzonesresponse.zone[0];
}
});
if(zoneObj.networktype == "Basic") {
args.$form.find('.form-item[rel=cleanup]').find('input').removeAttr('checked'); //unchecked
args.$form.find('.form-item[rel=cleanup]').hide(); //hidden
}
else {
args.$form.find('.form-item[rel=cleanup]').find('input').attr('checked', 'checked'); //checked
args.$form.find('.form-item[rel=cleanup]').css('display', 'inline-block'); //shown
}
},
fields: {
cleanup: {
label: 'label.clean.up',
isBoolean: true
}
}
},
messages: {
notification: function(args) {
return 'label.restart.network';
}
},
action: function(args) {
var array1 = [];
array1.push("&cleanup=" + (args.data.cleanup == "on"));
$.ajax({
url: createURL("restartNetwork&id=" + args.context.networks[0].id + array1.join("")),
dataType: "json",
async: true,
success: function(json) {
var jid = json.restartnetworkresponse.jobid;
args.response.success(
{_custom:
{jobId: jid,
getUpdatedItem: function(json) {
return json.queryasyncjobresultresponse.jobresult.network;
}
}
}
);
}
});
},
notification: {
poll: pollAsyncJobResult
}
},
remove: {
label: 'label.action.delete.network',
messages: {
confirm: function(args) {
return 'message.action.delete.network';
},
notification: function(args) {
return 'label.action.delete.network';
}
},
action: function(args) {
$.ajax({
url: createURL("deleteNetwork&id=" + args.context.networks[0].id),
dataType: "json",
async: true,
success: function(json) {
var jid = json.deletenetworkresponse.jobid;
args.response.success(
{_custom:
{jobId: jid
}
}
);
}
});
},
notification: {
poll: pollAsyncJobResult
}
}
},
tabFilter: function(args) {
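        // Hide the "add load balancer" tab unless the offering's Lb service advertises the
        // ElasticLb capability; the IP address tab is always hidden (redundant with "view all").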
var networkOfferingHavingELB = false;
$.ajax({
url: createURL("listNetworkOfferings&id=" + args.context.networks[0].networkofferingid),
dataType: "json",
async: false,
success: function(json) {
var networkoffering = json.listnetworkofferingsresponse.networkoffering[0];
$(networkoffering.service).each(function(){
var thisService = this;
if(thisService.name == "Lb") {
$(thisService.capability).each(function(){
if(this.name == "ElasticLb" && this.value == "true") {
networkOfferingHavingELB = true;
return false; //break $.each() loop
}
});
return false; //break $.each() loop
}
});
}
});
var hiddenTabs = ['ipAddresses']; // Disable IP address tab; it is redundant with 'view all' button
if(networkOfferingHavingELB == false)
hiddenTabs.push("addloadBalancer");
return hiddenTabs;
},
isMaximized: true,
tabs: {
details: {
title: 'Network Details',
preFilter: function(args) {
var hiddenFields = [];
var zone;
$.ajax({
url: createURL('listZones'),
data: {
id: args.context.networks[0].zoneid
},
async: false,
success: function(json) {
zone = json.listzonesresponse.zone[0];
}
});
if(zone.networktype == "Basic") {
hiddenFields.push("account");
hiddenFields.push("gateway");
hiddenFields.push("vlan");
hiddenFields.push("cidr");
//hiddenFields.push("netmask");
}
if(args.context.networks[0].type == "Isolated") {
hiddenFields.push("networkofferingdisplaytext");
hiddenFields.push("networkdomaintext");
hiddenFields.push("gateway");
hiddenFields.push("networkofferingname");
//hiddenFields.push("netmask");
}
else { //selectedGuestNetworkObj.type == "Shared"
hiddenFields.push("networkofferingid");
hiddenFields.push("networkdomain");
}
return hiddenFields;
},
fields: [
{
name: {
label: 'label.name',
isEditable: true
}
},
{
id: { label: 'label.id' },
zonename: { label: 'label.zone' },
displaytext: {
label: 'label.description',
isEditable: true
},
type: {
label: 'label.type'
},
state: {
label: 'label.state'
},
ispersistent:{
              label:'Persistent',
converter:cloudStack.converters.toBooleanText
},
restartrequired: {
label: 'label.restart.required',
converter: function(booleanValue) {
if(booleanValue == true)
return "<font color='red'>Yes</font>";
else if(booleanValue == false)
return "No";
}
},
vlan: { label: 'label.vlan.id' },
networkofferingname: { label: 'label.network.offering' },
networkofferingid: {
label: 'label.network.offering',
isEditable: true,
select: function(args){
if (args.context.networks[0].state == 'Destroyed') {
args.response.success({ data: [] });
return;
}
var items = [];
$.ajax({
url: createURL("listNetworkOfferings&networkid=" + args.context.networks[0].id),
dataType: "json",
async: false,
success: function(json) {
var networkOfferingObjs = json.listnetworkofferingsresponse.networkoffering;
$(networkOfferingObjs).each(function() {
items.push({id: this.id, description: this.displaytext});
});
}
});
$.ajax({
url: createURL("listNetworkOfferings&id=" + args.context.networks[0].networkofferingid), //include currently selected network offeirng to dropdown
dataType: "json",
async: false,
success: function(json) {
var networkOfferingObjs = json.listnetworkofferingsresponse.networkoffering;
$(networkOfferingObjs).each(function() {
items.push({id: this.id, description: this.displaytext});
});
}
});
args.response.success({data: items});
}
},
gateway: { label: 'label.gateway' },
//netmask: { label: 'Netmask' },
cidr: { label: 'label.cidr' },
networkdomaintext: {
label: 'label.network.domain.text'
},
networkdomain: {
label: 'label.network.domain',
isEditable: true
},
domain: { label: 'label.domain' },
account: { label: 'label.account' }
}
],
dataProvider: function(args) {
$.ajax({
url: createURL("listNetworks&id=" + args.context.networks[0].id + "&listAll=true"), //pass "&listAll=true" to "listNetworks&id=xxxxxxxx" for now before API gets fixed.
data: { listAll: true },
dataType: "json",
async: true,
success: function(json) {
var jsonObj = json.listnetworksresponse.network[0];
args.response.success(
{
actionFilter: cloudStack.actionFilter.guestNetwork,
data: jsonObj
}
);
}
});
}
},
acl: {
title: 'label.network.ACL',
custom: function(args) {
// Widget renders ACL multi-edit, overriding this fn
return $('<div>');
}
},
ipAddresses: {
title: 'label.menu.ipaddresses',
custom: function(args) {
// Widget renders IP addresses, overriding this fn
return $('<div>');
}
},
addloadBalancer: {
title: 'label.add.load.balancer',
custom: function(args) {
var context = args.context;
return $('<div>').multiEdit(
{
context: context,
listView: $.extend(true, {}, cloudStack.sections.instances, {
listView: {
dataProvider: function(args) {
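                          // List the VMs on the selected tier (or on the IP's associated network outside a VPC)
                          // and filter out instances in the Destroyed state before offering them for LB assignment.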
var networkid;
if('vpc' in args.context)
networkid = args.context.multiData.tier;
else
networkid = args.context.ipAddresses[0].associatednetworkid;
var data = {
page: args.page,
pageSize: pageSize,
networkid: networkid,
listAll: true
};
$.ajax({
url: createURL('listVirtualMachines'),
data: data,
dataType: 'json',
async: true,
success: function(data) {
args.response.success({
data: $.grep(
data.listvirtualmachinesresponse.virtualmachine ?
data.listvirtualmachinesresponse.virtualmachine : [],
function(instance) {
return $.inArray(instance.state, [
'Destroyed'
]) == -1;
}
)
});
},
error: function(data) {
args.response.error(parseXMLHttpResponse(data));
}
});
}
}
}),
multipleAdd: true,
fields: {
'name': { edit: true, label: 'label.name' },
'publicport': { edit: true, label: 'label.public.port' },
'privateport': { edit: true, label: 'label.private.port' },
'algorithm': {
label: 'label.algorithm',
select: function(args) {
args.response.success({
data: [
{ name: 'roundrobin', description: _l('label.round.robin') },
{ name: 'leastconn', description: _l('label.least.connections') },
{ name: 'source', description: _l('label.source') }
]
});
}
},
'sticky': {
label: 'label.stickiness',
custom: {
buttonLabel: 'label.configure',
action: cloudStack.lbStickyPolicy.dialog()
}
},
'add-vm': {
label: 'label.add.vm',
addButton: true
}
},
add: {
label: 'label.add.vm',
action: function(args) {
var data = {
algorithm: args.data.algorithm,
name: args.data.name,
privateport: args.data.privateport,
publicport: args.data.publicport,
openfirewall: false,
domainid: g_domainid,
account: g_account
};
if('vpc' in args.context) { //from VPC section
if(args.data.tier == null) {
args.response.error('Tier is required');
return;
}
$.extend(data, {
networkid: args.data.tier
});
}
else { //from Guest Network section
$.extend(data, {
networkid: args.context.networks[0].id
});
}
var stickyData = $.extend(true, {}, args.data.sticky);
$.ajax({
url: createURL('createLoadBalancerRule'),
data: data,
dataType: 'json',
async: true,
success: function(data) {
var itemData = args.itemData;
var jobID = data.createloadbalancerruleresponse.jobid;
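                          // The LB rule has been created; now assign the selected VMs to it in a second call.
                          // The job ID from rule creation is used below to poll the overall operation.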
$.ajax({
url: createURL('assignToLoadBalancerRule'),
data: {
id: data.createloadbalancerruleresponse.id,
virtualmachineids: $.map(itemData, function(elem) {
return elem.id;
}).join(',')
},
dataType: 'json',
async: true,
success: function(data) {
var lbCreationComplete = false;
args.response.success({
_custom: {
jobId: jobID
},
notification: {
label: 'label.add.load.balancer',
poll: function(args) {
var complete = args.complete;
var error = args.error;
pollAsyncJobResult({
_custom: args._custom,
complete: function(args) {
if (lbCreationComplete) {
return;
}
lbCreationComplete = true;
cloudStack.dialog.notice({
message: _l('message.add.load.balancer.under.ip') +
args.data.loadbalancer.publicip
});
if (stickyData &&
stickyData.methodname &&
stickyData.methodname != 'None') {
cloudStack.lbStickyPolicy.actions.add(
args.data.loadbalancer.id,
stickyData,
complete, // Complete
complete // Error
);
} else {
complete();
}
},
error: error
});
}
}
});
},
error: function(data) {
args.response.error(parseXMLHttpResponse(data));
}
});
},
error: function(data) {
args.response.error(parseXMLHttpResponse(data));
}
});
}
},
dataProvider: function(args) {
args.response.success({ //no LB listing in AddLoadBalancer tab
data: []
});
}
}
);
}
}
}
      }, //duplicate from cloudStack.sections.network.sections.networks.listView.detailView (end)
actionPreFilter: function(args) {
var tier = args.context.networks[0];
var state = tier.state;
return state == 'Running' ? ['start'] : ['stop'];
},
actions: {
add: {
label: 'label.add.new.tier',
createForm: {
title: 'label.add.new.tier',
fields: {
name: {
label: 'label.name',
validation: { required: true },
docID: 'helpTierName'
},
networkOfferingId: {
label: 'label.network.offering',
docID: 'helpTierNetworkOffering',
validation: { required: true },
dependsOn: 'zoneId',
select: function(args) {
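                  // First check whether this VPC already has a tier that supports LB; if one exists,
                  // network offerings that include the Lb service are filtered out of the dropdown below.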
var networkSupportingLbExists = false;
$.ajax({
url: createURL('listNetworks'),
data: {
vpcid: args.context.vpc[0].id,
supportedservices: 'LB'
},
success: function(json) {
                      networkSupportingLbExists = (json.listnetworksresponse.network != null && json.listnetworksresponse.network.length > 0);
$.ajax({
url: createURL('listNetworkOfferings'),
data: {
forvpc: true,
zoneid: args.zoneId,
guestiptype: 'Isolated',
supportedServices: 'SourceNat',
specifyvlan: false,
state: 'Enabled'
},
success: function(json) {
var networkOfferings = json.listnetworkofferingsresponse.networkoffering;
var items;
if(networkSupportingLbExists == true) {
items = $.grep(networkOfferings, function(networkOffering) {
var includingLbService = false;
$(networkOffering.service).each(function(){
var thisService = this;
if(thisService.name == "Lb") {
includingLbService = true;
return false; //break $.each() loop
}
});
return !includingLbService;
});
}
else {
items = networkOfferings;
}
args.response.success({
data: $.map(items, function(item) {
return {
id: item.id,
description: item.name
};
})
});
}
});
}
});
}
},
gateway: {
label: 'label.gateway',
docID: 'helpTierGateway',
validation: { required: true }
},
netmask: {
label: 'label.netmask',
docID: 'helpTierNetmask',
validation: { required: true }
}
}
},
action: function(args) {
var dataObj = {
zoneId: args.context.vpc[0].zoneid,
vpcid: args.context.vpc[0].id,
domainid: args.context.vpc[0].domainid,
account: args.context.vpc[0].account,
networkOfferingId: args.data.networkOfferingId,
name: args.data.name,
displayText: args.data.name,
gateway: args.data.gateway,
netmask: args.data.netmask
};
$.ajax({
url: createURL('createNetwork'),
dataType: 'json',
data: dataObj,
success: function(json) {
args.response.success({
data: json.createnetworkresponse.network
});
},
error: function(XMLHttpResponse) {
args.response.error(parseXMLHttpResponse(XMLHttpResponse));
}
});
},
messages: {
notification: function() { return 'Add new tier'; }
}
},
/*
start: {
label: 'Start tier',
shortLabel: 'Start',
action: function(args) {
args.response.success();
},
notification: {
poll: function(args) { args.complete({ data: { state: 'Running' } }); }
}
},
*/
/*
stop: {
label: 'Stop tier',
shortLabel: 'Stop',
action: function(args) {
args.response.success();
},
notification: {
poll: function(args) { args.complete({ data: { state: 'Stopped' } }); }
}
},
*/
addVM: {
label: 'label.add.VM.to.tier',
shortLabel: 'label.add.vm',
action: cloudStack.uiCustom.instanceWizard(
$.extend(true, {}, cloudStack.instanceWizard, {
pluginForm: {
name: 'vpcTierInstanceWizard'
}
})
),
notification: {
poll: pollAsyncJobResult
}
},
// Removing ACL buttons from the tier chart
/* acl: {
label: 'Configure ACL for tier',
shortLabel: 'ACL',
multiEdit: aclMultiEdit
}, */
remove: {
label: 'label.remove.tier',
action: function(args) {
$.ajax({
url: createURL('deleteNetwork'),
dataType: "json",
data: {
id: args.context.networks[0].id
},
success: function(json) {
var jid = json.deletenetworkresponse.jobid;
args.response.success(
{_custom:
{jobId: jid
}
}
);
}
});
},
notification: {
poll: pollAsyncJobResult
}
}
},
// Get tiers
dataProvider: function(args) {
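          // Fetch the VPC's tiers, then load each tier's VMs with a synchronous call so the
          // VM lists are attached to the tiers before the response is returned.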
$.ajax({
url: createURL("listNetworks"),
dataType: "json",
data: {
vpcid: args.context.vpc[0].id,
//listAll: true, //do not pass listAll to listNetworks under VPC
domainid: args.context.vpc[0].domainid,
account: args.context.vpc[0].account
},
async: true,
success: function(json) {
var networks = json.listnetworksresponse.network;
if(networks != null && networks.length > 0) {
for(var i = 0; i < networks.length; i++) {
$.ajax({
url: createURL("listVirtualMachines"),
dataType: "json",
data: {
networkid: networks[i].id,
listAll: true
},
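              // synchronous on purpose: the loop index i must still be valid inside the
              // success handler, and each tier needs its virtualMachines attached before
              // args.response.success runs below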
async: false,
success: function(json) {
networks[i].virtualMachines = json.listvirtualmachinesresponse.virtualmachine;
}
});
}
}
args.response.success({ tiers: networks });
}
});
}
}
};
}(jQuery, cloudStack));
| CLOUDSTACK-768:ACLs on Private GATEWAY :Acl id on Private Gateway ListView
| ui/scripts/vpc.js | CLOUDSTACK-768:ACLs on Private GATEWAY :Acl id on Private Gateway ListView | <ide><path>i/scripts/vpc.js
<ide> converter: function(str) {
<ide> return str ? 'Yes' : 'No';
<ide> }
<del> }
<add> },
<add> aclid:{label:'ACL id'}
<add>
<ide>
<ide> }
<ide> ], |
|
JavaScript | apache-2.0 | e5c1091f97d1c35867a0eda87fe10ebd581939ea | 0 | brightcove/shaka-player,brightcove/shaka-player,brightcove/shaka-player | /**
* @license
* Copyright 2016 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Loads the library. Chooses compiled or debug version of the library based
* on the presence or absence of the URL parameter "compiled".
*
* This dynamic loading process is not necessary in a production environment,
* but greatly simplifies the process of switching between compiled and
* uncompiled mode during development.
*
* This is used in the provided demo app, but can also be used to load the
* uncompiled version of the library into your own application environment.
*/
(function() { // anonymous namespace
// The sources may be in a different folder from the app.
// Compute the base URL for all library sources.
var currentScript = document.currentScript ||
document.scripts[document.scripts.length - 1];
var loaderSrc = currentScript.src;
var baseUrl = loaderSrc.split('/').slice(0, -1).join('/') + '/';
function loadRelativeScript(src) {
importScript(baseUrl + src);
}
function importScript(src) {
var script = document.createElement('script');
script.type = 'text/javascript';
script.src = src;
script.defer = true;
// Setting async = false is important to make sure the script is imported
// before the 'load' event fires.
script.async = false;
document.head.appendChild(script);
}
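  // Closure's base.js looks for this hook and uses it, instead of its document.write-based
  // loader, when importing goog.require'd dependencies.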
window.CLOSURE_IMPORT_SCRIPT = importScript;
var fields = location.search.substr(1);
fields = fields ? fields.split(';') : [];
var fragments = location.hash.substr(1);
fragments = fragments ? fragments.split(';') : [];
var combined = fields.concat(fragments);
// Very old browsers do not have Array.prototype.indexOf.
var compiledMode = false;
for (var i = 0; i < combined.length; ++i) {
if (combined[i] == 'compiled') {
compiledMode = true;
break;
}
}
if (compiledMode) {
// This contains the entire library.
loadRelativeScript('../dist/shaka-player.compiled.js');
} else {
// In non-compiled mode, we load the closure library and the generated deps
// file to bootstrap the system. goog.require will load the rest.
loadRelativeScript('../third_party/closure/goog/base.js');
loadRelativeScript('../dist/deps.js');
// This file contains goog.require calls for all exported classes.
loadRelativeScript('../shaka-player.uncompiled.js');
}
})(); // anonymous namespace
| demo/load.js | /**
* @license
* Copyright 2016 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Loads the library. Chooses compiled or debug version of the library based
* on the presence or absence of the URL parameter "compiled".
*
* This dynamic loading process is not necessary in a production environment,
* but greatly simplifies the process of switching between compiled and
* uncompiled mode during development.
*
* This is used in the provided demo app, but can also be used to load the
* uncompiled version of the library into your own application environment.
*/
(function() { // anonymous namespace
// The sources may be in a different folder from the app.
// Compute the base URL for all library sources.
var currentScript = document.currentScript ||
document.scripts[document.scripts.length - 1];
var loaderSrc = currentScript.src;
var baseUrl = loaderSrc.split('/').slice(0, -1).join('/') + '/';
function loadScript(src) {
// This does not seem like it would be the best way to do this, but the
// timing is different than creating a new script element and appending
// it to the head element. This way, all script loading happens before
// DOMContentLoaded. This is also compatible with goog.require's loading
// mechanism, whereas appending an element to head isn't.
document.write('<script src="' + baseUrl + src + '"></script>');
}
var fields = location.search.substr(1);
fields = fields ? fields.split(';') : [];
var fragments = location.hash.substr(1);
fragments = fragments ? fragments.split(';') : [];
var combined = fields.concat(fragments);
// Very old browsers do not have Array.prototype.indexOf.
var compiledMode = false;
for (var i = 0; i < combined.length; ++i) {
if (combined[i] == 'compiled') {
compiledMode = true;
break;
}
}
if (compiledMode) {
// This contains the entire library.
loadScript('../dist/shaka-player.compiled.js');
} else {
// In non-compiled mode, we load the closure library and the generated deps
// file to bootstrap the system. goog.require will load the rest.
loadScript('../third_party/closure/goog/base.js');
loadScript('../dist/deps.js');
// This file contains goog.require calls for all exported classes.
loadScript('../shaka-player.uncompiled.js');
}
})(); // anonymous namespace
| Clean up demo loader
The loader script for the demo uses document.write to add script tags.
This is not ideal, and currently generates these warnings in Chrome:
[Violation] Parser was blocked due to document.write(<script>)
[Violation] Avoid using document.write().
This replaces the import mechanism and hooks it into Closure library.
This also fixes uncompiled mode on Cobalt, which does not have
document.write. (http://cobalt.foo/)
Fixes: 37246436
Change-Id: I4b8acf41edb314be359bca42d224112020c30709
| demo/load.js | Clean up demo loader | <ide><path>emo/load.js
<ide> var loaderSrc = currentScript.src;
<ide> var baseUrl = loaderSrc.split('/').slice(0, -1).join('/') + '/';
<ide>
<del> function loadScript(src) {
<del> // This does not seem like it would be the best way to do this, but the
<del> // timing is different than creating a new script element and appending
<del> // it to the head element. This way, all script loading happens before
<del> // DOMContentLoaded. This is also compatible with goog.require's loading
<del> // mechanism, whereas appending an element to head isn't.
<del> document.write('<script src="' + baseUrl + src + '"></script>');
<add> function loadRelativeScript(src) {
<add> importScript(baseUrl + src);
<ide> }
<add>
<add> function importScript(src) {
<add> var script = document.createElement('script');
<add> script.type = 'text/javascript';
<add> script.src = src;
<add> script.defer = true;
<add> // Setting async = false is important to make sure the script is imported
<add> // before the 'load' event fires.
<add> script.async = false;
<add> document.head.appendChild(script);
<add> }
<add> window.CLOSURE_IMPORT_SCRIPT = importScript;
<ide>
<ide> var fields = location.search.substr(1);
<ide> fields = fields ? fields.split(';') : [];
<ide>
<ide> if (compiledMode) {
<ide> // This contains the entire library.
<del> loadScript('../dist/shaka-player.compiled.js');
<add> loadRelativeScript('../dist/shaka-player.compiled.js');
<ide> } else {
<ide> // In non-compiled mode, we load the closure library and the generated deps
<ide> // file to bootstrap the system. goog.require will load the rest.
<del> loadScript('../third_party/closure/goog/base.js');
<del> loadScript('../dist/deps.js');
<add> loadRelativeScript('../third_party/closure/goog/base.js');
<add> loadRelativeScript('../dist/deps.js');
<ide> // This file contains goog.require calls for all exported classes.
<del> loadScript('../shaka-player.uncompiled.js');
<add> loadRelativeScript('../shaka-player.uncompiled.js');
<ide> }
<ide> })(); // anonymous namespace |
|
Java | apache-2.0 | 7ffa2a013be4401df24455c2cefa2a52af5eebbe | 0 | alibaba/fastjson,alibaba/fastjson,alibaba/fastjson,alibaba/fastjson | /*
* Copyright 1999-2017 Alibaba Group.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.fastjson;
import static com.alibaba.fastjson.util.TypeUtils.castToBigDecimal;
import static com.alibaba.fastjson.util.TypeUtils.castToBigInteger;
import static com.alibaba.fastjson.util.TypeUtils.castToBoolean;
import static com.alibaba.fastjson.util.TypeUtils.castToByte;
import static com.alibaba.fastjson.util.TypeUtils.castToDate;
import static com.alibaba.fastjson.util.TypeUtils.castToDouble;
import static com.alibaba.fastjson.util.TypeUtils.castToFloat;
import static com.alibaba.fastjson.util.TypeUtils.castToInt;
import static com.alibaba.fastjson.util.TypeUtils.castToLong;
import static com.alibaba.fastjson.util.TypeUtils.castToShort;
import static com.alibaba.fastjson.util.TypeUtils.castToSqlDate;
import static com.alibaba.fastjson.util.TypeUtils.castToString;
import static com.alibaba.fastjson.util.TypeUtils.castToTimestamp;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import java.lang.reflect.Type;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.*;
import com.alibaba.fastjson.parser.ParserConfig;
import com.alibaba.fastjson.util.TypeUtils;
/**
* @author wenshao[[email protected]]
*/
public class JSONArray extends JSON implements List<Object>, Cloneable, RandomAccess, Serializable {
private static final long serialVersionUID = 1L;
private final List<Object> list;
protected transient Object relatedArray;
protected transient Type componentType;
public JSONArray(){
this.list = new ArrayList<Object>();
}
public JSONArray(List<Object> list){
if (list == null){
throw new IllegalArgumentException("list is null.");
}
this.list = list;
}
public JSONArray(int initialCapacity){
this.list = new ArrayList<Object>(initialCapacity);
}
/**
* @since 1.1.16
     * @return the related array object, or null if none has been set
*/
public Object getRelatedArray() {
return relatedArray;
}
public void setRelatedArray(Object relatedArray) {
this.relatedArray = relatedArray;
}
public Type getComponentType() {
return componentType;
}
public void setComponentType(Type componentType) {
this.componentType = componentType;
}
public int size() {
return list.size();
}
public boolean isEmpty() {
return list.isEmpty();
}
public boolean contains(Object o) {
return list.contains(o);
}
public Iterator<Object> iterator() {
return list.iterator();
}
public Object[] toArray() {
return list.toArray();
}
public <T> T[] toArray(T[] a) {
return list.toArray(a);
}
public boolean add(Object e) {
return list.add(e);
}
public JSONArray fluentAdd(Object e) {
list.add(e);
return this;
}
public boolean remove(Object o) {
return list.remove(o);
}
public JSONArray fluentRemove(Object o) {
list.remove(o);
return this;
}
public boolean containsAll(Collection<?> c) {
return list.containsAll(c);
}
public boolean addAll(Collection<? extends Object> c) {
return list.addAll(c);
}
public JSONArray fluentAddAll(Collection<? extends Object> c) {
list.addAll(c);
return this;
}
public boolean addAll(int index, Collection<? extends Object> c) {
return list.addAll(index, c);
}
public JSONArray fluentAddAll(int index, Collection<? extends Object> c) {
list.addAll(index, c);
return this;
}
public boolean removeAll(Collection<?> c) {
return list.removeAll(c);
}
public JSONArray fluentRemoveAll(Collection<?> c) {
list.removeAll(c);
return this;
}
public boolean retainAll(Collection<?> c) {
return list.retainAll(c);
}
public JSONArray fluentRetainAll(Collection<?> c) {
list.retainAll(c);
return this;
}
public void clear() {
list.clear();
}
public JSONArray fluentClear() {
list.clear();
return this;
}
public Object set(int index, Object element) {
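        // Lenient set: index -1 appends, and an index beyond the current size pads the list
        // with nulls before adding the element at that position (returning null in both cases).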
if (index == -1) {
list.add(element);
return null;
}
if (list.size() <= index) {
for (int i = list.size(); i < index; ++i) {
list.add(null);
}
list.add(element);
return null;
}
return list.set(index, element);
}
public JSONArray fluentSet(int index, Object element) {
set(index, element);
return this;
}
public void add(int index, Object element) {
list.add(index, element);
}
public JSONArray fluentAdd(int index, Object element) {
list.add(index, element);
return this;
}
public Object remove(int index) {
return list.remove(index);
}
public JSONArray fluentRemove(int index) {
list.remove(index);
return this;
}
public int indexOf(Object o) {
return list.indexOf(o);
}
public int lastIndexOf(Object o) {
return list.lastIndexOf(o);
}
public ListIterator<Object> listIterator() {
return list.listIterator();
}
public ListIterator<Object> listIterator(int index) {
return list.listIterator(index);
}
public List<Object> subList(int fromIndex, int toIndex) {
return list.subList(fromIndex, toIndex);
}
public Object get(int index) {
return list.get(index);
}
public JSONObject getJSONObject(int index) {
Object value = list.get(index);
if (value instanceof JSONObject) {
return (JSONObject) value;
}
if (value instanceof Map) {
return new JSONObject((Map) value);
}
return (JSONObject) toJSON(value);
}
public JSONArray getJSONArray(int index) {
Object value = list.get(index);
if (value instanceof JSONArray) {
return (JSONArray) value;
}
if (value instanceof List) {
return new JSONArray((List) value);
}
return (JSONArray) toJSON(value);
}
public <T> T getObject(int index, Class<T> clazz) {
Object obj = list.get(index);
return TypeUtils.castToJavaBean(obj, clazz);
}
public <T> T getObject(int index, Type type) {
Object obj = list.get(index);
if (type instanceof Class) {
return (T) TypeUtils.castToJavaBean(obj, (Class) type);
} else {
String json = JSON.toJSONString(obj);
return (T) JSON.parseObject(json, type);
}
}
public Boolean getBoolean(int index) {
Object value = get(index);
if (value == null) {
return null;
}
return castToBoolean(value);
}
public boolean getBooleanValue(int index) {
Object value = get(index);
if (value == null) {
return false;
}
return castToBoolean(value).booleanValue();
}
public Byte getByte(int index) {
Object value = get(index);
return castToByte(value);
}
public byte getByteValue(int index) {
Object value = get(index);
Byte byteVal = castToByte(value);
if (byteVal == null) {
return 0;
}
return byteVal.byteValue();
}
public Short getShort(int index) {
Object value = get(index);
return castToShort(value);
}
public short getShortValue(int index) {
Object value = get(index);
Short shortVal = castToShort(value);
if (shortVal == null) {
return 0;
}
return shortVal.shortValue();
}
public Integer getInteger(int index) {
Object value = get(index);
return castToInt(value);
}
public int getIntValue(int index) {
Object value = get(index);
Integer intVal = castToInt(value);
if (intVal == null) {
return 0;
}
return intVal.intValue();
}
public Long getLong(int index) {
Object value = get(index);
return castToLong(value);
}
public long getLongValue(int index) {
Object value = get(index);
Long longVal = castToLong(value);
if (longVal == null) {
return 0L;
}
return longVal.longValue();
}
public Float getFloat(int index) {
Object value = get(index);
return castToFloat(value);
}
public float getFloatValue(int index) {
Object value = get(index);
Float floatValue = castToFloat(value);
if (floatValue == null) {
return 0F;
}
return floatValue.floatValue();
}
public Double getDouble(int index) {
Object value = get(index);
return castToDouble(value);
}
public double getDoubleValue(int index) {
Object value = get(index);
Double doubleValue = castToDouble(value);
if (doubleValue == null) {
return 0D;
}
return doubleValue.doubleValue();
}
public BigDecimal getBigDecimal(int index) {
Object value = get(index);
return castToBigDecimal(value);
}
public BigInteger getBigInteger(int index) {
Object value = get(index);
return castToBigInteger(value);
}
public String getString(int index) {
Object value = get(index);
return castToString(value);
}
public java.util.Date getDate(int index) {
Object value = get(index);
return castToDate(value);
}
public java.sql.Date getSqlDate(int index) {
Object value = get(index);
return castToSqlDate(value);
}
public java.sql.Timestamp getTimestamp(int index) {
Object value = get(index);
return castToTimestamp(value);
}
/**
* @since 1.2.23
*/
public <T> List<T> toJavaList(Class<T> clazz) {
List<T> list = new ArrayList<T>(this.size());
ParserConfig config = ParserConfig.getGlobalInstance();
for (Object item : this) {
T classItem = (T) TypeUtils.cast(item, clazz, config);
list.add(classItem);
}
return list;
}
@Override
public Object clone() {
return new JSONArray(new ArrayList<Object>(list));
}
public boolean equals(Object obj) {
return this.list.equals(obj);
}
public int hashCode() {
return this.list.hashCode();
}
private void readObject(final java.io.ObjectInputStream in) throws IOException, ClassNotFoundException {
JSONObject.SecureObjectInputStream.ensureFields();
if (JSONObject.SecureObjectInputStream.fields != null && !JSONObject.SecureObjectInputStream.fields_error) {
ObjectInputStream secIn = new JSONObject.SecureObjectInputStream(in);
try {
secIn.defaultReadObject();
return;
} catch (java.io.NotActiveException e) {
// skip
}
}
in.defaultReadObject();
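        // Vet the concrete class of each deserialized element: types already registered in
        // TypeUtils' class mapping are skipped, anything else goes through the autotype check.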
for (Object item : list) {
if (item == null) {
continue;
}
String typeName = item.getClass().getName();
if (TypeUtils.getClassFromMapping(typeName) == null) {
ParserConfig.global.checkAutoType(typeName, null);
}
}
}
}
| src/main/java/com/alibaba/fastjson/JSONArray.java | /*
* Copyright 1999-2017 Alibaba Group.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.fastjson;
import static com.alibaba.fastjson.util.TypeUtils.castToBigDecimal;
import static com.alibaba.fastjson.util.TypeUtils.castToBigInteger;
import static com.alibaba.fastjson.util.TypeUtils.castToBoolean;
import static com.alibaba.fastjson.util.TypeUtils.castToByte;
import static com.alibaba.fastjson.util.TypeUtils.castToDate;
import static com.alibaba.fastjson.util.TypeUtils.castToDouble;
import static com.alibaba.fastjson.util.TypeUtils.castToFloat;
import static com.alibaba.fastjson.util.TypeUtils.castToInt;
import static com.alibaba.fastjson.util.TypeUtils.castToLong;
import static com.alibaba.fastjson.util.TypeUtils.castToShort;
import static com.alibaba.fastjson.util.TypeUtils.castToSqlDate;
import static com.alibaba.fastjson.util.TypeUtils.castToString;
import static com.alibaba.fastjson.util.TypeUtils.castToTimestamp;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import java.lang.reflect.Type;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.*;
import com.alibaba.fastjson.parser.ParserConfig;
import com.alibaba.fastjson.util.TypeUtils;
/**
* @author wenshao[[email protected]]
*/
public class JSONArray extends JSON implements List<Object>, Cloneable, RandomAccess, Serializable {
private static final long serialVersionUID = 1L;
private final List<Object> list;
protected transient Object relatedArray;
protected transient Type componentType;
public JSONArray(){
this.list = new ArrayList<Object>();
}
public JSONArray(List<Object> list){
if (list == null){
throw new IllegalArgumentException("list is null.");
}
this.list = list;
}
public JSONArray(int initialCapacity){
this.list = new ArrayList<Object>(initialCapacity);
}
/**
* @since 1.1.16
* @return
*/
public Object getRelatedArray() {
return relatedArray;
}
public void setRelatedArray(Object relatedArray) {
this.relatedArray = relatedArray;
}
public Type getComponentType() {
return componentType;
}
public void setComponentType(Type componentType) {
this.componentType = componentType;
}
public int size() {
return list.size();
}
public boolean isEmpty() {
return list.isEmpty();
}
public boolean contains(Object o) {
return list.contains(o);
}
public Iterator<Object> iterator() {
return list.iterator();
}
public Object[] toArray() {
return list.toArray();
}
public <T> T[] toArray(T[] a) {
return list.toArray(a);
}
public boolean add(Object e) {
return list.add(e);
}
public JSONArray fluentAdd(Object e) {
list.add(e);
return this;
}
public boolean remove(Object o) {
return list.remove(o);
}
public JSONArray fluentRemove(Object o) {
list.remove(o);
return this;
}
public boolean containsAll(Collection<?> c) {
return list.containsAll(c);
}
public boolean addAll(Collection<? extends Object> c) {
return list.addAll(c);
}
public JSONArray fluentAddAll(Collection<? extends Object> c) {
list.addAll(c);
return this;
}
public boolean addAll(int index, Collection<? extends Object> c) {
return list.addAll(index, c);
}
public JSONArray fluentAddAll(int index, Collection<? extends Object> c) {
list.addAll(index, c);
return this;
}
public boolean removeAll(Collection<?> c) {
return list.removeAll(c);
}
public JSONArray fluentRemoveAll(Collection<?> c) {
list.removeAll(c);
return this;
}
public boolean retainAll(Collection<?> c) {
return list.retainAll(c);
}
public JSONArray fluentRetainAll(Collection<?> c) {
list.retainAll(c);
return this;
}
public void clear() {
list.clear();
}
public JSONArray fluentClear() {
list.clear();
return this;
}
public Object set(int index, Object element) {
if (index == -1) {
list.add(element);
return null;
}
if (list.size() <= index) {
for (int i = list.size(); i < index; ++i) {
list.add(null);
}
list.add(element);
return null;
}
return list.set(index, element);
}
public JSONArray fluentSet(int index, Object element) {
set(index, element);
return this;
}
public void add(int index, Object element) {
list.add(index, element);
}
public JSONArray fluentAdd(int index, Object element) {
list.add(index, element);
return this;
}
public Object remove(int index) {
return list.remove(index);
}
public JSONArray fluentRemove(int index) {
list.remove(index);
return this;
}
public int indexOf(Object o) {
return list.indexOf(o);
}
public int lastIndexOf(Object o) {
return list.lastIndexOf(o);
}
public ListIterator<Object> listIterator() {
return list.listIterator();
}
public ListIterator<Object> listIterator(int index) {
return list.listIterator(index);
}
public List<Object> subList(int fromIndex, int toIndex) {
return list.subList(fromIndex, toIndex);
}
public Object get(int index) {
return list.get(index);
}
public JSONObject getJSONObject(int index) {
Object value = list.get(index);
if (value instanceof JSONObject) {
return (JSONObject) value;
}
if (value instanceof Map) {
return new JSONObject((Map) value);
}
return (JSONObject) toJSON(value);
}
public JSONArray getJSONArray(int index) {
Object value = list.get(index);
if (value instanceof JSONArray) {
return (JSONArray) value;
}
if (value instanceof List) {
return new JSONArray((List) value);
}
return (JSONArray) toJSON(value);
}
public <T> T getObject(int index, Class<T> clazz) {
Object obj = list.get(index);
return TypeUtils.castToJavaBean(obj, clazz);
}
public <T> T getObject(int index, Type type) {
Object obj = list.get(index);
if (type instanceof Class) {
return (T) TypeUtils.castToJavaBean(obj, (Class) type);
} else {
String json = JSON.toJSONString(obj);
return (T) JSON.parseObject(json, type);
}
}
public Boolean getBoolean(int index) {
Object value = get(index);
if (value == null) {
return null;
}
return castToBoolean(value);
}
public boolean getBooleanValue(int index) {
Object value = get(index);
if (value == null) {
return false;
}
return castToBoolean(value).booleanValue();
}
public Byte getByte(int index) {
Object value = get(index);
return castToByte(value);
}
public byte getByteValue(int index) {
Object value = get(index);
Byte byteVal = castToByte(value);
if (byteVal == null) {
return 0;
}
return byteVal.byteValue();
}
public Short getShort(int index) {
Object value = get(index);
return castToShort(value);
}
public short getShortValue(int index) {
Object value = get(index);
Short shortVal = castToShort(value);
if (shortVal == null) {
return 0;
}
return shortVal.shortValue();
}
public Integer getInteger(int index) {
Object value = get(index);
return castToInt(value);
}
public int getIntValue(int index) {
Object value = get(index);
Integer intVal = castToInt(value);
if (intVal == null) {
return 0;
}
return intVal.intValue();
}
public Long getLong(int index) {
Object value = get(index);
return castToLong(value);
}
public long getLongValue(int index) {
Object value = get(index);
Long longVal = castToLong(value);
if (longVal == null) {
return 0L;
}
return longVal.longValue();
}
public Float getFloat(int index) {
Object value = get(index);
return castToFloat(value);
}
public float getFloatValue(int index) {
Object value = get(index);
Float floatValue = castToFloat(value);
if (floatValue == null) {
return 0F;
}
return floatValue.floatValue();
}
public Double getDouble(int index) {
Object value = get(index);
return castToDouble(value);
}
public double getDoubleValue(int index) {
Object value = get(index);
Double doubleValue = castToDouble(value);
if (doubleValue == null) {
return 0D;
}
return doubleValue.doubleValue();
}
public BigDecimal getBigDecimal(int index) {
Object value = get(index);
return castToBigDecimal(value);
}
public BigInteger getBigInteger(int index) {
Object value = get(index);
return castToBigInteger(value);
}
public String getString(int index) {
Object value = get(index);
return castToString(value);
}
public java.util.Date getDate(int index) {
Object value = get(index);
return castToDate(value);
}
public java.sql.Date getSqlDate(int index) {
Object value = get(index);
return castToSqlDate(value);
}
public java.sql.Timestamp getTimestamp(int index) {
Object value = get(index);
return castToTimestamp(value);
}
/**
* @since 1.2.23
*/
public <T> List<T> toJavaList(Class<T> clazz) {
List<T> list = new ArrayList<T>(this.size());
ParserConfig config = ParserConfig.getGlobalInstance();
for (Object item : this) {
T classItem = (T) TypeUtils.cast(item, clazz, config);
list.add(classItem);
}
return list;
}
@Override
public Object clone() {
return new JSONArray(new ArrayList<Object>(list));
}
public boolean equals(Object obj) {
return this.list.equals(obj);
}
public int hashCode() {
return this.list.hashCode();
}
private void readObject(final java.io.ObjectInputStream in) throws IOException, ClassNotFoundException {
JSONObject.SecureObjectInputStream.ensureFields();
if (JSONObject.SecureObjectInputStream.fields != null && !JSONObject.SecureObjectInputStream.fields_error) {
ObjectInputStream secIn = new JSONObject.SecureObjectInputStream(in);
try {
secIn.defaultReadObject();
return;
} catch (java.io.NotActiveException e) {
// skip
}
}
in.defaultReadObject();
for (Object item : list) {
if (item != null) {
ParserConfig.global.checkAutoType(item.getClass().getName(), null);
}
}
}
}
| bug fix for JSONArray.readObject
| src/main/java/com/alibaba/fastjson/JSONArray.java | bug fix for JSONArray.readObject | <ide><path>rc/main/java/com/alibaba/fastjson/JSONArray.java
<ide>
<ide> in.defaultReadObject();
<ide> for (Object item : list) {
<del> if (item != null) {
<del> ParserConfig.global.checkAutoType(item.getClass().getName(), null);
<add> if (item == null) {
<add> continue;
<ide> }
<add>
<add> String typeName = item.getClass().getName();
<add> if (TypeUtils.getClassFromMapping(typeName) == null) {
<add> ParserConfig.global.checkAutoType(typeName, null);
<add> }
<ide> }
<ide> }
<ide> } |
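For context, here is a minimal sketch of how the patched readObject above gets exercised: a plain JDK serialization round-trip of a JSONArray. Only the JSONArray API shown in the file above is used, the stream classes are standard java.io, the class name JSONArraySerializationSketch is purely illustrative, and the comments state only what the code above shows.
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import com.alibaba.fastjson.JSONArray;
public class JSONArraySerializationSketch {
    public static void main(String[] args) throws Exception {
        JSONArray array = new JSONArray();
        array.add("a string element");
        array.add(42);
        // Standard JDK serialization; JSONArray implements Serializable.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        ObjectOutputStream out = new ObjectOutputStream(bytes);
        out.writeObject(array);
        out.close();
        // Deserialization runs JSONArray.readObject (shown above), which vets each element's
        // class name unless TypeUtils already has the type in its class mapping.
        ObjectInputStream in = new ObjectInputStream(
                new ByteArrayInputStream(bytes.toByteArray()));
        JSONArray copy = (JSONArray) in.readObject();
        in.close();
        System.out.println(copy.getString(0) + ", " + copy.getIntValue(1));
    }
}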
|
Java | lgpl-2.1 | c3122240e8b904b4aab38da799535bfc2a10d4fc | 0 | Arabidopsis-Information-Portal/intermine,elsiklab/intermine,Arabidopsis-Information-Portal/intermine,elsiklab/intermine,elsiklab/intermine,elsiklab/intermine,elsiklab/intermine,Arabidopsis-Information-Portal/intermine,elsiklab/intermine,elsiklab/intermine,Arabidopsis-Information-Portal/intermine,Arabidopsis-Information-Portal/intermine,elsiklab/intermine,Arabidopsis-Information-Portal/intermine,Arabidopsis-Information-Portal/intermine,Arabidopsis-Information-Portal/intermine,elsiklab/intermine,Arabidopsis-Information-Portal/intermine | package org.intermine.api.query;
/*
* Copyright (C) 2002-2016 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.TimeZone;
import java.util.TreeSet;
import org.apache.log4j.Logger;
import org.intermine.InterMineException;
import org.intermine.api.bag.BagQueryConfig;
import org.intermine.api.bag.BagQueryResult;
import org.intermine.api.bag.BagQueryRunner;
import org.intermine.api.profile.InterMineBag;
import org.intermine.api.profile.ProfileManager;
import org.intermine.api.template.TemplateManager;
import org.intermine.metadata.ClassDescriptor;
import org.intermine.metadata.ConstraintOp;
import org.intermine.metadata.FieldDescriptor;
import org.intermine.metadata.Model;
import org.intermine.metadata.TypeUtil;
import org.intermine.metadata.Util;
import org.intermine.objectstore.ObjectStore;
import org.intermine.objectstore.ObjectStoreException;
import org.intermine.objectstore.query.BagConstraint;
import org.intermine.objectstore.query.ClassConstraint;
import org.intermine.objectstore.query.Constraint;
import org.intermine.objectstore.query.ConstraintSet;
import org.intermine.objectstore.query.ContainsConstraint;
import org.intermine.objectstore.query.FromElement;
import org.intermine.objectstore.query.OrderDescending;
import org.intermine.objectstore.query.PathExpressionField;
import org.intermine.objectstore.query.Query;
import org.intermine.objectstore.query.QueryCast;
import org.intermine.objectstore.query.QueryClass;
import org.intermine.objectstore.query.QueryCloner;
import org.intermine.objectstore.query.QueryCollectionPathExpression;
import org.intermine.objectstore.query.QueryCollectionReference;
import org.intermine.objectstore.query.QueryEvaluable;
import org.intermine.objectstore.query.QueryExpression;
import org.intermine.objectstore.query.QueryField;
import org.intermine.objectstore.query.QueryFunction;
import org.intermine.objectstore.query.QueryHelper;
import org.intermine.objectstore.query.QueryNode;
import org.intermine.objectstore.query.QueryObjectPathExpression;
import org.intermine.objectstore.query.QueryObjectReference;
import org.intermine.objectstore.query.QueryPathExpression;
import org.intermine.objectstore.query.QueryPathExpressionWithSelect;
import org.intermine.objectstore.query.QuerySelectable;
import org.intermine.objectstore.query.QueryValue;
import org.intermine.objectstore.query.Queryable;
import org.intermine.objectstore.query.SimpleConstraint;
import org.intermine.objectstore.query.WidthBucketFunction;
import org.intermine.pathquery.LogicExpression;
import org.intermine.pathquery.OrderDirection;
import org.intermine.pathquery.OrderElement;
import org.intermine.pathquery.OuterJoinStatus;
import org.intermine.pathquery.Path;
import org.intermine.pathquery.PathConstraint;
import org.intermine.pathquery.PathConstraintAttribute;
import org.intermine.pathquery.PathConstraintBag;
import org.intermine.pathquery.PathConstraintIds;
import org.intermine.pathquery.PathConstraintLookup;
import org.intermine.pathquery.PathConstraintLoop;
import org.intermine.pathquery.PathConstraintMultiValue;
import org.intermine.pathquery.PathConstraintMultitype;
import org.intermine.pathquery.PathConstraintNull;
import org.intermine.pathquery.PathConstraintRange;
import org.intermine.pathquery.PathConstraintSubclass;
import org.intermine.pathquery.PathException;
import org.intermine.pathquery.PathQuery;
import org.intermine.util.PropertiesUtil;
/**
* Helper methods for main controller and main action
* @author Mark Woodbridge
* @author Thomas Riley
* @author Matthew Wakeling
*/
public final class MainHelper
{
private MainHelper() {
}
private static final Logger LOG = Logger.getLogger(MainHelper.class);
private static final LookupTokeniser LOOKUP_TOKENISER = LookupTokeniser.getLookupTokeniser();
/**
* Converts a PathQuery object into an ObjectStore Query object, and optionally populates a Map
* from String path in the PathQuery to the object in the Query that represents it.
*
* @param pathQuery the PathQuery
* @param savedBags the current saved bags map (a Map from bag name to InterMineBag)
* @param pathToQueryNode optional parameter which will be populated with entries, mapping from
* String path in the pathQuery to objects in the result Query
* @param bagQueryRunner a BagQueryRunner to use to perform LOOKUPs
* @param returnBagQueryResults optional parameter in which any BagQueryResult objects can be
* returned
* @return an ObjectStore Query object
* @throws ObjectStoreException if something goes wrong
*/
public static Query makeQuery(PathQuery pathQuery, Map<String, InterMineBag> savedBags,
Map<String, QuerySelectable> pathToQueryNode, BagQueryRunner bagQueryRunner,
Map<String, BagQueryResult> returnBagQueryResults) throws ObjectStoreException {
synchronized (pathQuery) {
List<String> problems = pathQuery.verifyQuery();
if (!problems.isEmpty()) {
throw new ObjectStoreException("PathQuery is invalid: " + problems);
}
Query q = new Query();
try {
makeQuery(q, pathQuery.getRootClass(), pathQuery, savedBags, pathToQueryNode,
bagQueryRunner, returnBagQueryResults);
} catch (PathException e) {
throw new Error("PathQuery is invalid, but was valid earlier", e);
}
return q;
}
}
/**
* Converts a PathQuery object into an ObjectStore Query object, and optionally populates a Map
* from String path in the PathQuery to the object in the Query that represents it. This is the
* recursive private method that performs the algorithm.
*
* @param q a Query, QueryObjectPathExpression, or QueryCollectionPathExpression, depending on
* the level of recursion reached so far
* @param root the path representing the level of recursion - we will process this outer join
* group
* @param query the PathQuery
* @param savedBags the current saved bags map (a Map from bag name to InterMineBag)
* @param pathToQueryNode optional parameter which will be populated with entries, mapping from
* String path in the pathQuery to objects in the result Query
* @param bagQueryRunner a BagQueryRunner to use to perform LOOKUPs
* @param returnBagQueryResults optional parameter in which any BagQueryResult objects can be
* returned
* @throws ObjectStoreException if something goes wrong
*/
private static void makeQuery(Queryable q, String root, PathQuery query,
Map<String, InterMineBag> savedBags, Map<String, QuerySelectable> pathToQueryNode,
BagQueryRunner bagQueryRunner,
Map<String, BagQueryResult> returnBagQueryResults) throws ObjectStoreException {
PathQuery pathQuery = query;
Model model = pathQuery.getModel();
// We need to call getQueryToExecute() first. For template queries this gets a query that
// excludes any optional constraints that have been switched off. A normal PathQuery is
// unchanged.
pathQuery = pathQuery.getQueryToExecute();
try {
// This is the root constraint set that will be set in the query
ConstraintSet andCs = new ConstraintSet(ConstraintOp.AND);
// This is the Map that stores what we will put in pathToQueryNode. Because we can't
// trust what is in there already, we use a separate variable and copy across afterwards
Map<String, QuerySelectable> queryBits = new HashMap<String, QuerySelectable>();
// If we have recursed, and are operating on a PathExpression, then we need to extract
// the default class which was set up in the parent group and add it to the queryBits
if (q instanceof QueryObjectPathExpression) {
queryBits.put(root, ((QueryObjectPathExpression) q).getDefaultClass());
} else if (q instanceof QueryCollectionPathExpression) {
queryBits.put(root, ((QueryCollectionPathExpression) q).getDefaultClass());
}
// This is a Map from main path to outer join group of all classes in the query
Map<String, String> outerJoinGroups = pathQuery.getOuterJoinGroups();
// This is the subclass map from the query, for creating Path objects
Map<String, String> subclasses = pathQuery.getSubclasses();
// Get the logic expression for the relevant outer join group, and the list of
// relevant constraint codes
Set<String> relevantCodes = pathQuery.getConstraintGroups().get(root);
LogicExpression logic = pathQuery.getConstraintLogicForGroup(root);
logic = handleNullOuterJoins(root, pathQuery, model, relevantCodes,
logic);
// This is the set of loop constraints that participate in the class collapsing
// mechanism. All others must have a ClassConstraint generated for them.
Set<PathConstraintLoop> participatingLoops = findParticipatingLoops(logic, pathQuery
.getConstraints());
// This is the map of EQUALS loop constraints, from the path that should be omitted
// from the Query to the path that represents both paths.
Map<String, String> loops = makeLoopsMap(participatingLoops);
// Get any paths in the query that are constrained to be NULL/NOT NULL references or
// collections AND don't appear in other constraints or the query view. These will only
// be accessed in an EXISTS subquery and shouldn't be add to the FROM.
Map<String, String> pathConstraintNullOnly =
getPathConstraintNulls(model, pathQuery, true);
// Set up queue system. We don't know what order we want to process these entries in,
// so a queue allows us to put one we can't process yet to the back of the queue to
// process later
LinkedList<String> queue = new LinkedList<String>();
for (String path : outerJoinGroups.keySet()) {
queue.addLast(path);
}
Map<String, String> deferralReasons = new HashMap<String, String>();
int queueDeferred = 0;
// This is a Map of PathExpression objects that have been created. They will be added to
// the SELECT list later on, when we can determine the correct order in the SELECT list.
Map<String, QueryPathExpressionWithSelect> pathExpressions
= new HashMap<String, QueryPathExpressionWithSelect>();
while (!queue.isEmpty()) {
if (queueDeferred > queue.size() + 2) {
throw new IllegalArgumentException("Cannot handle entries in queue: " + queue
+ ", reasons: " + deferralReasons + ", root = " + root);
}
String stringPath = queue.removeFirst();
deferralReasons.remove(stringPath);
Path path = new Path(model, stringPath, subclasses);
String outerJoinGroup = outerJoinGroups.get(stringPath);
if (path.isRootPath()) {
// This is the root path. Just add the QueryClass, no further action.
if (root.equals(outerJoinGroup)) {
// This class is relevant to this outer join group
QueryClass qc = new QueryClass(path.getEndType());
((Query) q).addFrom(qc);
queryBits.put(stringPath, qc);
}
} else if (stringPath.equals(root)) {
// We are on the root of an outer join. No action required
} else {
String parent = path.getPrefix().getNoConstraintsString();
QueryClass parentQc = (QueryClass) ((queryBits.get(parent)
instanceof QueryClass) ? queryBits.get(parent) : null);
if (parentQc == null) {
if (root.equals(outerJoinGroups.get(parent))) {
// We cannot process this path yet. It depends on a parent that hasn't
// been processed yet. Put it to the back of the queue.
deferralReasons.put(stringPath, "Could not process path " + stringPath
+ " because its parent has not yet been processed");
queue.addLast(stringPath);
queueDeferred++;
continue;
}
} else {
if (root.equals(outerJoinGroup)) {
// This class is relevant to this outer join group
QueryClass qc;
if (loops.containsKey(stringPath)) {
// This path is looped on another path
qc = (QueryClass) queryBits.get(loops.get(stringPath));
if (qc == null) {
deferralReasons.put(stringPath, "Could not process path "
+ stringPath + " because it is looped onto a class ("
+ loops.get(stringPath) + ") that has not been "
+ "processed yet");
queue.addLast(stringPath);
queueDeferred++;
continue;
}
} else {
qc = new QueryClass(path.getEndType());
if (!pathConstraintNullOnly.containsKey(path.toString())) {
if (q instanceof Query) {
((Query) q).addFrom(qc);
} else {
((QueryCollectionPathExpression) q).addFrom(qc);
}
}
}
// unless there is ONLY a null constraint on this ref/col path we need
// to add a contains constraint to make the join
if (!pathConstraintNullOnly.containsKey(stringPath)) {
if (path.endIsReference()) {
andCs.addConstraint(new ContainsConstraint(
new QueryObjectReference(parentQc,
path.getLastElement()), ConstraintOp.CONTAINS,
qc));
} else {
andCs.addConstraint(new ContainsConstraint(
new QueryCollectionReference(parentQc,
path.getLastElement()), ConstraintOp.CONTAINS,
qc));
}
}
queryBits.put(stringPath, qc);
} else {
// This is a path from another outer join group. We only need to act if
// the parent path is from this outer join group - in that case, we
// make a PathExpression and recurse
if (root.equals(outerJoinGroups.get(parent))) {
// We need to act. However, first we need to know whether to use a
// collection or reference path expression
boolean isCollection = path.endIsCollection();
// Even if this is false, we may still need to upgrade to collection
// if there are multiple paths in the outer join group
if (!isCollection) {
int groupSize = 0;
for (Map.Entry<String, String> entry
: outerJoinGroups.entrySet()) {
if (outerJoinGroup.equals(entry.getValue())) {
groupSize++;
}
}
if (groupSize > 1) {
isCollection = true;
}
}
if (isCollection) {
QueryCollectionPathExpression qn
= new QueryCollectionPathExpression(parentQc,
path.getLastElement(), path.getEndType());
makeQuery(qn, stringPath, pathQuery, savedBags,
pathToQueryNode, bagQueryRunner, returnBagQueryResults);
queryBits.put(stringPath, qn);
pathExpressions.put(stringPath, qn);
} else {
QueryObjectPathExpression qn
= new QueryObjectPathExpression(parentQc,
path.getLastElement(), path.getEndType());
makeQuery(qn, stringPath, pathQuery, savedBags, pathToQueryNode,
bagQueryRunner, returnBagQueryResults);
queryBits.put(stringPath, qn);
pathExpressions.put(stringPath, qn);
}
}
}
}
}
deferralReasons.remove(stringPath);
queueDeferred = 0;
}
Map<String, Constraint> codeToConstraint = putConstraintsInMap(q,
savedBags, bagQueryRunner, returnBagQueryResults,
pathQuery, model, queryBits, subclasses, relevantCodes,
participatingLoops);
// Use the constraint logic to create a ConstraintSet structure with the constraints
// inserted into it
createConstraintStructure(logic, andCs, codeToConstraint);
setConstraints(q, andCs);
List<QuerySelectable> select = generateSelectList(root, pathQuery, model, queryBits,
outerJoinGroups, subclasses, pathExpressions);
copySelectList(q, select);
generateOrderBy(q, pathQuery, model, queryBits, subclasses);
if (pathToQueryNode != null) {
pathToQueryNode.putAll(queryBits);
}
} catch (PathException e) {
throw new ObjectStoreException("PathException while converting PathQuery to ObjectStore"
+ " Query", e);
}
}
private static LogicExpression handleNullOuterJoins(String root,
PathQuery pathQuery, Model model, Set<String> relevantCodes,
LogicExpression logicExpression) {
LogicExpression logic = logicExpression;
// This is complicated - for NULL/NOT NULL constraints on refs/cols that span an outer
// join boundary we need the constraint to be on the left side of the boundary, i.e.
// in the main part of the query rather than subquery on the select. We may need to
// move a constraint code from another outer join group.
// e.g. Company.departments IS_NOT_NULL and Company.departments is an outer join
Map<String, String> nullRefColConstraints = getPathConstraintNulls(model, pathQuery,
false);
for (String constraintPath : nullRefColConstraints.keySet()) {
OuterJoinStatus ojs = pathQuery.getOuterJoinStatus(constraintPath);
if (ojs == OuterJoinStatus.OUTER) {
// which side of outer join are we on?
if (root.split("\\.").length < constraintPath.split("\\.").length) {
// we're on the left side of the outer join so we want to add this
// constraint to the relevant codes now
String code = nullRefColConstraints.get(constraintPath);
if (!relevantCodes.contains(code)) {
relevantCodes.add(code);
logic = addToConstraintLogic(logic, code);
}
} else {
// we've recursed into an outer join so we don't want to process this
// constraint now, remove it if it's in the relevant codes
String code = nullRefColConstraints.get(constraintPath);
if (relevantCodes.contains(code)) {
relevantCodes.remove(code);
logic = removeFromConstraintLogic(logic, code);
}
}
}
}
return logic;
}
private static void setConstraints(Queryable q, ConstraintSet andCs) {
if (!andCs.getConstraints().isEmpty()) {
Constraint c = andCs;
while ((c instanceof ConstraintSet)
&& (((ConstraintSet) c).getConstraints().size() == 1)) {
c = ((ConstraintSet) c).getConstraints().iterator().next();
}
q.setConstraint(c);
}
}
private static void copySelectList(Queryable q, List<QuerySelectable> select) {
// Copy select list into query:
QueryClass defaultClass = null;
if (q instanceof QueryObjectPathExpression) {
defaultClass = ((QueryObjectPathExpression) q).getDefaultClass();
}
if ((select.size() == 1) && select.get(0).equals(defaultClass)) {
// Don't add anything to the SELECT list - default is fine
} else {
for (QuerySelectable qs : select) {
if (qs instanceof QueryObjectPathExpression) {
QueryObjectPathExpression qope = (QueryObjectPathExpression) qs;
if (qope.getSelect().size() > 1) {
for (int i = 0; i < qope.getSelect().size(); i++) {
q.addToSelect(new PathExpressionField(qope, i));
}
} else {
q.addToSelect(qope);
}
} else {
q.addToSelect(qs);
}
}
}
}
private static void generateOrderBy(Queryable q, PathQuery pathQuery,
Model model, Map<String, QuerySelectable> queryBits,
Map<String, String> subclasses) throws PathException {
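        // Adds the PathQuery's explicit order-by elements first, then any view fields that
        // belong to this outer join group, skipping entries already in the ORDER BY list.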
if (q instanceof Query) {
Query qu = (Query) q;
for (OrderElement order : pathQuery.getOrderBy()) {
QueryField qf = (QueryField) queryBits.get(order.getOrderPath());
if (qf == null) {
Path path = new Path(model, order.getOrderPath(), subclasses);
QueryClass qc = (QueryClass) queryBits.get(path.getPrefix()
.getNoConstraintsString());
qf = new QueryField(qc, path.getLastElement());
queryBits.put(order.getOrderPath(), qf);
}
if ((!qu.getOrderBy().contains(qf)) && (!qu.getOrderBy()
.contains(new OrderDescending(qf)))) {
if (order.getDirection().equals(OrderDirection.DESC)) {
qu.addToOrderBy(new OrderDescending(qf));
} else {
qu.addToOrderBy(qf);
}
}
}
for (String view : pathQuery.getView()) {
QueryField qf = (QueryField) queryBits.get(view);
if (qf != null) {
// If qf IS null, that means it is in another outer join group, as we have
// populated queryBits earlier with all view objects
if ((!qu.getOrderBy().contains(qf)) && (!qu.getOrderBy()
.contains(new OrderDescending(qf)))) {
qu.addToOrderBy(qf);
}
}
}
}
}
// Generate the SELECT list
private static List<QuerySelectable> generateSelectList(String root,
PathQuery pathQuery, Model model,
Map<String, QuerySelectable> queryBits,
Map<String, String> outerJoinGroups,
Map<String, String> subclasses,
Map<String, QueryPathExpressionWithSelect> pathExpressions)
throws PathException {
HashSet<String> pathExpressionsDone = new HashSet<String>();
List<QuerySelectable> select = new ArrayList<QuerySelectable>();
for (String view : pathQuery.getView()) {
Path path = new Path(model, view, subclasses);
String parentPath = path.getPrefix().getNoConstraintsString();
String outerJoinGroup = outerJoinGroups.get(parentPath);
if (root.equals(outerJoinGroup)) {
QueryClass qc = (QueryClass) queryBits.get(parentPath);
QueryField qf = new QueryField(qc, path.getLastElement());
queryBits.put(view, qf);
if (!select.contains(qc)) {
select.add(qc);
}
} else {
while ((!path.isRootPath())
&& (!root.equals(outerJoinGroups.get(path.getPrefix()
.getNoConstraintsString())))) {
path = path.getPrefix();
}
if (!path.isRootPath()) {
// We have found a path in the view that is a path expression we want to
// use
view = path.getNoConstraintsString();
if (!pathExpressionsDone.contains(view)) {
QueryPathExpressionWithSelect pe = pathExpressions.get(view);
QueryClass qc = pe.getQueryClass();
if (!select.contains(qc)) {
select.add(qc);
}
if (!select.contains(pe)) {
select.add(pe);
}
}
}
}
}
return select;
}
private static Map<String, Constraint> putConstraintsInMap(Queryable q,
Map<String, InterMineBag> savedBags, BagQueryRunner bagQueryRunner,
Map<String, BagQueryResult> returnBagQueryResults,
PathQuery pathQuery, Model model,
Map<String, QuerySelectable> queryBits,
Map<String, String> subclasses, Set<String> relevantCodes,
Set<PathConstraintLoop> participatingLoops) throws PathException,
BagNotFound, ObjectStoreException {
// For each of the relevant codes, produce a Constraint object, and put it in a Map.
// Constraints that do not have a code (namely loop NOT EQUALS) can be put straight into
// the andCs.
Map<String, Constraint> codeToConstraint = new HashMap<String, Constraint>();
for (Map.Entry<PathConstraint, String> entry : pathQuery.getConstraints().entrySet()) {
String code = entry.getValue();
if (relevantCodes.contains(code)) {
PathConstraint constraint = entry.getKey();
String stringPath = constraint.getPath();
Path path = new Path(model, stringPath, subclasses);
QuerySelectable field = queryBits.get(constraint.getPath());
if (field == null) {
// This must be a constraint on an attribute, as all the classes will
// already be in querybits
QueryClass qc = (QueryClass) queryBits.get(path.getPrefix()
.getNoConstraintsString());
field = new QueryField(qc, path.getLastElement());
queryBits.put(stringPath, field);
}
if (constraint instanceof PathConstraintAttribute) {
PathConstraintAttribute pca = (PathConstraintAttribute) constraint;
Class<?> fieldType = path.getEndType();
if (String.class.equals(fieldType)) {
codeToConstraint.put(code, makeQueryStringConstraint(
(QueryField) field, pca));
} else if (Date.class.equals(fieldType)) {
codeToConstraint.put(code, makeQueryDateConstraint(
(QueryField) field, pca));
} else {
// Use simple forms of operators when not dealing with strings.
ConstraintOp simpleOp = ConstraintOp.EXACT_MATCH == pca.getOp()
? ConstraintOp.EQUALS
: ConstraintOp.STRICT_NOT_EQUALS == pca.getOp()
? ConstraintOp.NOT_EQUALS : pca.getOp();
codeToConstraint.put(code, new SimpleConstraint((QueryField) field,
simpleOp, new QueryValue(TypeUtil.stringToObject(
fieldType, pca.getValue()))));
}
} else if (constraint instanceof PathConstraintNull) {
if (path.endIsAttribute()) {
codeToConstraint.put(code, new SimpleConstraint((QueryField) field,
constraint.getOp()));
} else {
String parent = path.getPrefix().getNoConstraintsString();
QueryClass parentQc = (QueryClass) ((queryBits.get(parent)
instanceof QueryClass) ? queryBits.get(parent) : null);
if (path.endIsReference()) {
QueryObjectReference qr = new QueryObjectReference(parentQc,
path.getLastElement());
codeToConstraint.put(code, new ContainsConstraint(qr,
constraint.getOp()));
} else { // collection
QueryCollectionReference qr = new QueryCollectionReference(parentQc,
path.getLastElement());
codeToConstraint.put(code, new ContainsConstraint(qr,
constraint.getOp()));
}
}
} else if (constraint instanceof PathConstraintLoop) {
// We need to act if this is not a participating constraint - otherwise
// this has been taken care of above.
if (!participatingLoops.contains(constraint)) {
PathConstraintLoop pcl = (PathConstraintLoop) constraint;
if (pcl.getPath().length() > pcl.getLoopPath().length()) {
codeToConstraint.put(code, new ClassConstraint((QueryClass)
queryBits.get(pcl.getLoopPath()), constraint.getOp(),
(QueryClass) field));
} else {
codeToConstraint.put(code, new ClassConstraint((QueryClass) field,
constraint.getOp(), (QueryClass) queryBits
.get(((PathConstraintLoop) constraint).getLoopPath())));
}
}
} else if (constraint instanceof PathConstraintSubclass) {
// No action needed.
} else if (constraint instanceof PathConstraintBag) {
PathConstraintBag pcb = (PathConstraintBag) constraint;
InterMineBag bag = savedBags.get(pcb.getBag());
if (bag == null) {
throw new BagNotFound(pcb.getBag());
}
codeToConstraint.put(code, new BagConstraint((QueryNode) field, pcb.getOp(),
bag.getOsb()));
} else if (constraint instanceof PathConstraintIds) {
codeToConstraint.put(code, new BagConstraint(new QueryField(
(QueryClass) field, "id"), constraint.getOp(),
((PathConstraintIds) constraint).getIds()));
} else if (constraint instanceof PathConstraintRange) {
PathConstraintRange pcr = (PathConstraintRange) constraint;
codeToConstraint.put(code, makeRangeConstraint(q, (QueryNode) field, pcr));
} else if (constraint instanceof PathConstraintMultitype) {
PathConstraintMultitype pcmt = (PathConstraintMultitype) constraint;
codeToConstraint.put(code, makeMultiTypeConstraint(pathQuery.getModel(),
(QueryNode) field, pcmt));
} else if (constraint instanceof PathConstraintMultiValue) {
Class<?> fieldType = path.getEndType();
if (String.class.equals(fieldType)) {
codeToConstraint.put(code, new BagConstraint((QueryField) field,
constraint.getOp(), ((PathConstraintMultiValue) constraint)
.getValues()));
} else {
Collection<Object> objects = new ArrayList<Object>();
for (String s : ((PathConstraintMultiValue) constraint).getValues()) {
objects.add(TypeUtil.stringToObject(fieldType, s));
}
codeToConstraint.put(code, new BagConstraint((QueryField) field,
constraint.getOp(), objects));
}
} else if (constraint instanceof PathConstraintLookup) {
QueryClass qc = (QueryClass) field;
PathConstraintLookup pcl = (PathConstraintLookup) constraint;
if (bagQueryRunner == null) {
throw new NullPointerException("Cannot convert this PathQuery to an "
+ "ObjectStore Query without a BagQueryRunner");
}
String identifiers = pcl.getValue();
BagQueryResult bagQueryResult;
List<String> identifierList = LOOKUP_TOKENISER.tokenise(identifiers);
try {
bagQueryResult = bagQueryRunner.searchForBag(qc.getType()
.getSimpleName(), identifierList, pcl.getExtraValue(), true);
} catch (ClassNotFoundException e) {
throw new ObjectStoreException(e);
} catch (InterMineException e) {
throw new ObjectStoreException(e);
}
codeToConstraint.put(code, new BagConstraint(new QueryField(qc, "id"),
ConstraintOp.IN, bagQueryResult.getMatchAndIssueIds()));
if (returnBagQueryResults != null) {
returnBagQueryResults.put(stringPath, bagQueryResult);
}
} else {
throw new ObjectStoreException("Unknown constraint type "
+ constraint.getClass().getName());
}
}
}
return codeToConstraint;
}
/**
* Construct a new multi-type constraint.
* @param model The model to look for types within.
* @param field The subject of the constraint.
* @param pcmt The constraint itself.
* @return A constraint.
* @throws ObjectStoreException if the constraint names types that are not in the model.
*/
protected static Constraint makeMultiTypeConstraint(
Model model,
QueryNode field,
PathConstraintMultitype pcmt) throws ObjectStoreException {
QueryField typeClass = new QueryField((QueryClass) field, "class");
ConstraintOp op = (pcmt.getOp() == ConstraintOp.ISA)
? ConstraintOp.IN : ConstraintOp.NOT_IN;
Set<Class<?>> classes = new TreeSet<Class<?>>(new ClassNameComparator());
for (String name: pcmt.getValues()) {
ClassDescriptor cd = model.getClassDescriptorByName(name);
            if (cd == null) { // PathQuery validation should catch this, but check defensively.
throw new ObjectStoreException(
String.format("%s is not a class in the %s model", name, model.getName()));
}
classes.add(cd.getType());
}
return new BagConstraint(typeClass, op, classes);
}
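    // Illustration of makeMultiTypeConstraint above (class names illustrative): an ISA
    // constraint with values ["Gene", "Protein"] becomes
    //   new BagConstraint(new QueryField(qc, "class"), ConstraintOp.IN,
    //           {Gene.class, Protein.class})
    // and an ISNT constraint uses ConstraintOp.NOT_IN instead.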
private static Map<String, String> makeLoopsMap(Collection<PathConstraintLoop> constraints) {
// A PathConstraintLoop should participate in this mechanism if it is an EQUALS constraint,
// and its code is not inside an OR in the constraint logic.
// Let's look at this from an equivalence groups point of view. We need to cope with the
// situation where a = a.b.c and a.d = a.b.c, putting all three into an equivalence group.
// The group name should be the shortest path in the group, or the lowest compareTo() for
// a tie-break.
Map<String, String> membership = new HashMap<String, String>();
Map<String, Set<String>> groups = new HashMap<String, Set<String>>();
for (PathConstraintLoop loop : constraints) {
if (ConstraintOp.EQUALS.equals(loop.getOp())) {
String path1 = loop.getPath();
String path2 = loop.getLoopPath();
if (membership.containsKey(path1)) {
if (membership.containsKey(path2)) {
String existingGroup1 = membership.get(path1);
String existingGroup2 = membership.get(path2);
if (!existingGroup1.equals(existingGroup2)) {
Set<String> members1 = groups.remove(existingGroup1);
Set<String> members2 = groups.remove(existingGroup2);
members1.addAll(members2);
String shorter = shorterPath(existingGroup1, existingGroup2);
for (String toAdd : members1) {
membership.put(toAdd, shorter);
}
groups.put(shorter, members1);
}
} else {
String existingGroup = membership.get(path1);
Set<String> members = groups.remove(existingGroup);
members.add(path2);
String shorter = shorterPath(path2, existingGroup);
for (String toAdd : members) {
membership.put(toAdd, shorter);
}
groups.put(shorter, members);
}
} else {
if (membership.containsKey(path2)) {
String existingGroup = membership.get(path2);
Set<String> members = groups.remove(existingGroup);
members.add(path1);
String shorter = shorterPath(path1, existingGroup);
for (String toAdd : members) {
membership.put(toAdd, shorter);
}
groups.put(shorter, members);
} else {
String shorter = shorterPath(path1, path2);
membership.put(path2, shorter);
membership.put(path1, shorter);
groups.put(shorter, new HashSet<String>(Arrays.asList(path1, path2)));
}
}
}
}
Map<String, String> retval = new HashMap<String, String>();
for (Map.Entry<String, String> entry : membership.entrySet()) {
if (!entry.getKey().equals(entry.getValue())) {
retval.put(entry.getKey(), entry.getValue());
}
}
return retval;
}
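    // Worked example for makeLoopsMap above: the loop constraints "a = a.b.c" and
    // "a.d = a.b.c" fall into one equivalence group named after its shortest member, "a",
    // so the returned map is { "a.b.c" -> "a", "a.d" -> "a" } ("a" itself is dropped
    // because entries whose key equals their value are filtered out).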
// find any reference or collection paths in query that have NULL/NOT NULL constraints.
// If nullOnly then return those that have a NULL/NOT NULL constraint that otherwise don't
// appear in the view or other constraints
private static Map<String, String> getPathConstraintNulls(Model model, PathQuery pq,
boolean nullOnly) {
Map<String, String> nullRefsAndCols = new HashMap<String, String>();
for (Map.Entry<PathConstraint, String> entry : pq.getConstraints().entrySet()) {
PathConstraint constraint = entry.getKey();
String code = entry.getValue();
if (constraint instanceof PathConstraintNull) {
try {
Path constraintPath = new Path(model, constraint.getPath());
if (constraintPath.endIsReference() || constraintPath.endIsCollection()) {
boolean isNullOnly = true;
// look for any view elements starting with this path
for (String viewPath : pq.getView()) {
if (viewPath.startsWith(constraintPath.toString())) {
isNullOnly = false;
}
}
// look for any other constraints starting with this path
for (PathConstraint otherCon : pq.getConstraints().keySet()) {
if (otherCon != constraint
&& otherCon.getPath().startsWith(constraintPath.toString())) {
isNullOnly = false;
}
}
// constraint path wasn't found elsewhere so it's a null collection only
if (nullOnly && isNullOnly) {
nullRefsAndCols.put(constraintPath.toString(), code);
} else if (!nullOnly) {
nullRefsAndCols.put(constraintPath.toString(), code);
}
}
} catch (PathException e) {
// this shouldn't happen because the query is already verified
                LOG.warn("Error finding paths constrained to null only: " + e);
}
}
}
return nullRefsAndCols;
}
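    // Illustration of getPathConstraintNulls above: given "Company.departments IS NOT NULL"
    // with code A, the map contains "Company.departments" -> "A" whenever nullOnly is false;
    // with nullOnly true it is only included if no view column and no other constraint path
    // starts with "Company.departments".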
private static String shorterPath(String path1, String path2) {
if (path1.length() > path2.length()) {
return path2;
} else if (path2.length() > path1.length()) {
return path1;
} else if (path1.compareTo(path2) > 0) {
return path2;
} else if (path1.compareTo(path2) < 0) {
return path1;
} else {
throw new IllegalArgumentException("Two paths are identical: " + path1);
}
}
/**
* Returns the Set of PathConstraintLoop objects that will participate in the QueryClass
* collapsing mechanism.
*
* @param logic the constraint logic
* @param constraints a Map from PathConstraint to code
* @return a Set of PathConstraintLoop objects
*/
protected static Set<PathConstraintLoop> findParticipatingLoops(LogicExpression logic,
Map<PathConstraint, String> constraints) {
if (logic != null) {
LogicExpression.Node node = logic.getRootNode();
Set<String> codes = new HashSet<String>();
findAndCodes(codes, node);
Set<PathConstraintLoop> retval = new HashSet<PathConstraintLoop>();
for (Map.Entry<PathConstraint, String> entry : constraints.entrySet()) {
if (codes.contains(entry.getValue())) {
if (entry.getKey() instanceof PathConstraintLoop) {
if (ConstraintOp.EQUALS.equals(entry.getKey().getOp())) {
retval.add((PathConstraintLoop) entry.getKey());
}
}
}
}
return retval;
}
return Collections.emptySet();
}
/**
     * Finds all the codes in the given constraint logic that are combined purely by AND
     * (that is, not nested inside any OR).
*
* @param codes codes are added to this
* @param node a node to traverse
*/
protected static void findAndCodes(Set<String> codes, LogicExpression.Node node) {
if (node instanceof LogicExpression.Variable) {
codes.add(((LogicExpression.Variable) node).getName());
} else if (node instanceof LogicExpression.And) {
for (LogicExpression.Node child : ((LogicExpression.And) node).getChildren()) {
findAndCodes(codes, child);
}
}
}
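    // Illustration of findAndCodes above: for the logic "A and (B or C)" only "A" is
    // collected - "B" and "C" sit beneath an OR node, so the recursion stops there.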
/**
     * Make a SimpleConstraint for the given constraint. The comparison is case-insensitive
     * unless the operator is an exact match or a strict not-equals. "=" and "<>" are always
     * converted to "LIKE" and "NOT LIKE" respectively, with user wildcards translated to
     * their SQL equivalents.
*/
private static SimpleConstraint makeQueryStringConstraint(QueryField qf,
PathConstraintAttribute c) {
QueryEvaluable qe;
String value;
ConstraintOp op = c.getOp();
// Perform case insensitive matches, unless asked specifically not to.
if (ConstraintOp.EXACT_MATCH.equals(op) || ConstraintOp.STRICT_NOT_EQUALS.equals(op)) {
qe = qf;
value = c.getValue();
op = (ConstraintOp.EXACT_MATCH.equals(op))
? ConstraintOp.EQUALS: ConstraintOp.NOT_EQUALS;
} else {
qe = new QueryExpression(QueryExpression.LOWER, qf);
value = Util.wildcardUserToSql(c.getValue().toLowerCase());
}
// notes:
// - we always turn EQUALS into a MATCHES(LIKE) constraint and rely on Postgres
// to be sensible
        // - value is quoted in a way suitable for a LIKE constraint, but not for a
        //   normal equals. for example 'Dpse\GA10108' needs to be 'Dpse\\GA10108' for equals
// but 'Dpse\\\\GA10108' (and hence "Dpse\\\\\\\\GA10108" as a Java string because
// backslash must be quoted with a backslash)
if (ConstraintOp.EQUALS.equals(op)) {
return new SimpleConstraint(qe, ConstraintOp.MATCHES, new QueryValue(value));
} else if (ConstraintOp.NOT_EQUALS.equals(op)) {
return new SimpleConstraint(qe, ConstraintOp.DOES_NOT_MATCH, new QueryValue(value));
} else if (ConstraintOp.CONTAINS.equals(op)) {
return new SimpleConstraint(qe, ConstraintOp.MATCHES,
new QueryValue("%" + value + "%"));
} else if (ConstraintOp.DOES_NOT_CONTAIN.equals(op)) {
return new SimpleConstraint(qe, ConstraintOp.DOES_NOT_MATCH,
new QueryValue("%" + value + "%"));
} else {
return new SimpleConstraint(qe, op, new QueryValue(value));
}
}
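    // Illustration of makeQueryStringConstraint above (value illustrative): an EQUALS
    // constraint with value "eve*" becomes roughly LOWER(field) MATCHES 'eve%', i.e. a
    // case-insensitive LIKE, while an EXACT_MATCH keeps the original case and produces a
    // plain EQUALS on the unmodified value.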
/**
* Make a SimpleConstraint for the given Date Constraint. The time stored in the Date will be
     * ignored. Example webapp constraints and the corresponding object store constraints:
* <table>
* <thead>
* <tr>
* <th>Webapp Version</th>
* <th>ObjectStore Version</th>
* </tr>
* </thead>
* <tbody>
* <tr>
* <td>
* <code><= 2008-01-02</code>
* </td>
* <td>
     * <code><= 2008-01-02 23:59:59</code>
* </td>
* </tr>
* <tr>
* <td>
     * <code>< 2008-01-02</code>
* </td>
* <td>
* <code>< 2008-01-02 00:00:00</code>
* </td>
* </tr>
* <tr>
* <td>
* <code>> 2008-01-02</code>
* </td>
* <td>
* <code>> 2008-01-02 23:59:59</code>
* </td>
* </tr>
* <tr>
* <td>
* <code>>= 2008-01-02</code>
* </td>
* <td>
     * <code>>= 2008-01-02 00:00:00</code>
* </td>
* </tr>
* </tbody>
* </table>
*
* @param qf the QueryNode in the new query
* @param c the webapp constraint
* @return a new object store constraint
*/
protected static Constraint makeQueryDateConstraint(QueryField qf, PathConstraintAttribute c) {
Date dateValue = (Date) TypeUtil.stringToObject(Date.class, c.getValue());
Calendar startOfDay = GregorianCalendar.getInstance(TimeZone.getTimeZone("GMT"));
startOfDay.setTime(dateValue);
startOfDay.set(Calendar.HOUR_OF_DAY, 0);
startOfDay.set(Calendar.MINUTE, 0);
startOfDay.set(Calendar.SECOND, 0);
startOfDay.set(Calendar.MILLISECOND, 0);
QueryValue startOfDayQV = new QueryValue(startOfDay.getTime());
Calendar endOfDay = (Calendar) startOfDay.clone();
endOfDay.add(Calendar.DATE, 1);
QueryValue endOfDayQV = new QueryValue(endOfDay.getTime());
if (ConstraintOp.EXACT_MATCH.equals(c.getOp()) || ConstraintOp.EQUALS.equals(c.getOp())) {
ConstraintSet cs = new ConstraintSet(ConstraintOp.AND);
cs.addConstraint(new SimpleConstraint(qf, ConstraintOp.GREATER_THAN_EQUALS,
startOfDayQV));
cs.addConstraint(new SimpleConstraint(qf, ConstraintOp.LESS_THAN, endOfDayQV));
return cs;
} else if (ConstraintOp.NOT_EQUALS.equals(c.getOp())) {
ConstraintSet cs = new ConstraintSet(ConstraintOp.OR);
cs.addConstraint(new SimpleConstraint(qf, ConstraintOp.LESS_THAN, startOfDayQV));
cs.addConstraint(new SimpleConstraint(qf, ConstraintOp.GREATER_THAN_EQUALS,
endOfDayQV));
return cs;
} else if (ConstraintOp.LESS_THAN_EQUALS.equals(c.getOp())) {
return new SimpleConstraint(qf, ConstraintOp.LESS_THAN, endOfDayQV);
} else if (ConstraintOp.LESS_THAN.equals(c.getOp())) {
return new SimpleConstraint(qf, ConstraintOp.LESS_THAN, startOfDayQV);
} else if (ConstraintOp.GREATER_THAN.equals(c.getOp())) {
return new SimpleConstraint(qf, ConstraintOp.GREATER_THAN_EQUALS, endOfDayQV);
} else if (ConstraintOp.GREATER_THAN_EQUALS.equals(c.getOp())) {
return new SimpleConstraint(qf, ConstraintOp.GREATER_THAN_EQUALS, startOfDayQV);
} else {
throw new RuntimeException("Unknown ConstraintOp: " + c);
}
}
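    // Illustration of makeQueryDateConstraint above: "= 2008-01-02" becomes
    //   field >= 2008-01-02 00:00:00 AND field < 2008-01-03 00:00:00
    // i.e. the whole of that day in GMT, whatever time component the stored value has.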
/**
* Given a LogicExpression, a Map from codes to Constraint objects, and a ConstraintSet to put
* it all in, construct a tree of ConstraintSets that reflects the expression.
*
* @param logic the LogicExpression object
* @param cs the ConstraintSet to put the constraints in
* @param codeToConstraint a Map from constraint code to Constraint object
*/
protected static void createConstraintStructure(LogicExpression logic, ConstraintSet cs,
Map<String, Constraint> codeToConstraint) {
if (logic != null) {
LogicExpression.Node node = logic.getRootNode();
createConstraintStructure(node, cs, codeToConstraint);
}
}
/**
* Given a LogicExpression.Node, a Map from codes to Constraint objects, and a ConstraintSet to
* put it all in, construct a tree of ConstraintSets that reflects the expression.
*
* @param node the LogicExpression.Node object
* @param cs the ConstraintSet to put the constraints in
* @param codeToConstraint a Map from constraint code to Constraint object
*/
protected static void createConstraintStructure(LogicExpression.Node node, ConstraintSet cs,
Map<String, Constraint> codeToConstraint) {
if (node instanceof LogicExpression.Variable) {
Constraint con = codeToConstraint.get(((LogicExpression.Variable) node).getName());
if (con != null) {
// If it is null, then it is probably a Loop constraint that participated in
// QueryClass collapsing.
cs.addConstraint(con);
}
} else {
LogicExpression.Operator op = (LogicExpression.Operator) node;
ConstraintSet set = null;
if (op instanceof LogicExpression.And) {
if (ConstraintOp.AND.equals(cs.getOp())) {
set = cs;
} else {
set = new ConstraintSet(ConstraintOp.AND);
}
} else {
if (ConstraintOp.OR.equals(cs.getOp())) {
set = cs;
} else {
set = new ConstraintSet(ConstraintOp.OR);
}
}
for (LogicExpression.Node child : op.getChildren()) {
createConstraintStructure(child, set, codeToConstraint);
}
if (set != cs) {
cs.addConstraint(set);
}
}
}
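    // Illustration of createConstraintStructure above: the logic "A and (B or C)" produces
    //   AND( constraintA, OR( constraintB, constraintC ) )
    // reusing the passed-in ConstraintSet whenever its operator already matches the node.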
/**
* Add a constraint code to a logic expression, ANDed with any constraints already in the
* expression, e.g. 'A OR B' + code C -> '(A OR B) AND C'. If the expression is null a new
* expression is created.
* @param logic an existing constraint logic
* @param code the code to add
* @return a new logic expression including the new code
*/
protected static LogicExpression addToConstraintLogic(LogicExpression logic, String code) {
LogicExpression newLogic = logic;
if (logic == null) {
newLogic = new LogicExpression(code);
} else {
newLogic = new LogicExpression("(" + logic.toString() + ") AND " + code);
}
return newLogic;
}
/**
     * Remove a constraint code from a logic expression, e.g. removing A from
     * '(A OR B) AND C' gives 'B AND C'. If there is only one code in the expression
     * return null.
* @param logic an existing constraint logic
* @param code the code to remove
* @return a new logic expression or null if the expression is now empty
*/
protected static LogicExpression removeFromConstraintLogic(LogicExpression logic,
String code) {
if (logic != null) {
try {
logic.removeVariable(code);
} catch (IllegalArgumentException e) {
// an IllegalArgumentException is thrown if we try to remove the root node, this
// would make an empty expression so we can just set it to null
return null;
}
}
return logic;
}
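    // A minimal sketch of how the two logic helpers compose (codes illustrative):
    //   LogicExpression logic = new LogicExpression("A or B");
    //   logic = addToConstraintLogic(logic, "C");      // now "(A or B) AND C"
    //   logic = removeFromConstraintLogic(logic, "C"); // C dropped again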
/**
* Generate a query from a PathQuery, to summarise a particular column of results.
*
* @param pathQuery the PathQuery
* @param savedBags the current saved bags map
* @param pathToQueryNode Map, into which columns to display will be placed
* @param summaryPath a String path of the column to summarise
* @param os an ObjectStore to do LOOKUP queries in
* @param classKeys class key config
* @param bagQueryConfig a BagQueryConfig object
* @param pm the ProfileManager to fetch the superuser profile from
     * @param occurancesOnly if true, force the summary to take the form of an item summary
* @return the generated summary query
* @throws ObjectStoreException if there is a problem creating the query
*/
public static Query makeSummaryQuery(
PathQuery pathQuery,
Map<String, InterMineBag> savedBags,
Map<String, QuerySelectable> pathToQueryNode,
String summaryPath,
ObjectStore os,
Map<String, List<FieldDescriptor>> classKeys,
BagQueryConfig bagQueryConfig,
ProfileManager pm,
boolean occurancesOnly) throws ObjectStoreException {
TemplateManager templateManager = new TemplateManager(pm.getSuperuserProfile(),
os.getModel());
BagQueryRunner bagQueryRunner = new BagQueryRunner(os, classKeys, bagQueryConfig,
templateManager);
return MainHelper.makeSummaryQuery(pathQuery, summaryPath, savedBags, pathToQueryNode,
bagQueryRunner, occurancesOnly);
}
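    // A minimal usage sketch for the method above, assuming the ObjectStore, class keys,
    // BagQueryConfig and ProfileManager are already available ("Gene.symbol" is an
    // illustrative column path):
    //   Map<String, QuerySelectable> nodes = new HashMap<String, QuerySelectable>();
    //   Query summary = MainHelper.makeSummaryQuery(pathQuery, savedBags, nodes,
    //           "Gene.symbol", os, classKeys, bagQueryConfig, pm, false);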
/**
* Generate a query from a PathQuery, to summarise a particular column of results.
*
* @param pathQuery the PathQuery
* @param summaryPath a String path of the column to summarise
* @param savedBags the current saved bags map
* @param pathToQueryNode Map, into which columns to display will be placed
* @param bagQueryRunner a BagQueryRunner to execute bag queries
* @return the generated summary query
* @throws ObjectStoreException if there is a problem creating the query
*/
public static Query makeSummaryQuery(
PathQuery pathQuery,
String summaryPath,
Map<String, InterMineBag> savedBags,
Map<String, QuerySelectable> pathToQueryNode,
BagQueryRunner bagQueryRunner) throws ObjectStoreException {
return makeSummaryQuery(pathQuery, summaryPath, savedBags, pathToQueryNode,
bagQueryRunner, false);
}
/**
* Generate a query from a PathQuery, to summarise a particular column of results.
*
* @param pathQuery the PathQuery
* @param summaryPath a String path of the column to summarise
* @param savedBags the current saved bags map
* @param pathToQueryNode Map, into which columns to display will be placed
* @param bagQueryRunner a BagQueryRunner to execute bag queries
     * @param occurancesOnly if true, force the summary to take the form of an item summary
* @return the generated summary query
* @throws ObjectStoreException if there is a problem creating the query
*/
public static Query makeSummaryQuery(
PathQuery pathQuery,
String summaryPath,
Map<String, InterMineBag> savedBags,
Map<String, QuerySelectable> pathToQueryNode,
BagQueryRunner bagQueryRunner,
boolean occurancesOnly) throws ObjectStoreException {
Map<String, QuerySelectable> origPathToQueryNode = new HashMap<String, QuerySelectable>();
        Query subQ = makeQuery(pathQuery, savedBags, origPathToQueryNode, bagQueryRunner,
                null);
subQ.clearOrderBy();
Map<String, QuerySelectable> newSelect = new LinkedHashMap<String, QuerySelectable>();
Set<QuerySelectable> oldSelect = new HashSet<QuerySelectable>();
for (QuerySelectable qs : subQ.getSelect()) {
oldSelect.add(qs);
            if ((qs instanceof QueryClass) || !(qs instanceof QueryPathExpression)) {
                newSelect.put(subQ.getAliases().get(qs), qs);
            }
}
subQ.clearSelect();
for (Map.Entry<String, QuerySelectable> selectEntry : newSelect.entrySet()) {
subQ.addToSelect(selectEntry.getValue(), selectEntry.getKey());
}
return recursiveMakeSummaryQuery(origPathToQueryNode, summaryPath, subQ, oldSelect,
pathToQueryNode, occurancesOnly);
}
    private static Query recursiveMakeSummaryQuery(
            Map<String, QuerySelectable> origPathToQueryNode,
            String summaryPath,
            Query subQ,
            Set<QuerySelectable> oldSelect,
            Map<String, QuerySelectable> pathToQueryNode,
            boolean occurancesOnly) {
QueryField qf = (QueryField) origPathToQueryNode.get(summaryPath);
try {
if ((qf == null) || (!subQ.getFrom().contains(qf.getFromElement()))) {
// This column may be an outer join
String prefix = summaryPath.substring(0, summaryPath.lastIndexOf('.'));
String fieldName = summaryPath.substring(summaryPath.lastIndexOf('.') + 1);
QuerySelectable qs = origPathToQueryNode.get(prefix);
if (qs == null) {
throw new NullPointerException("Error - path " + summaryPath + " is not in map "
+ origPathToQueryNode);
} else if (qs instanceof QueryObjectPathExpression) {
QueryObjectPathExpression qope = (QueryObjectPathExpression) qs;
if ((!oldSelect.contains(qs))
&& (!oldSelect.contains(new PathExpressionField(qope, 0)))) {
throw new IllegalArgumentException("QueryObjectPathExpression is too deeply"
+ " nested");
}
// We need to add QueryClasses to the query for this outer join. This will make
// it an inner join, so the "no object" results will disappear.
QueryClass lastQc = qope.getDefaultClass();
qf = new QueryField(lastQc, fieldName);
subQ.addFrom(lastQc);
subQ.addToSelect(lastQc);
QueryClass rootQc = qope.getQueryClass();
QueryHelper.addAndConstraint(subQ, new ContainsConstraint(
new QueryObjectReference(rootQc, qope.getFieldName()),
ConstraintOp.CONTAINS, lastQc));
if (qope.getConstraint() != null) {
QueryHelper.addAndConstraint(subQ, qope.getConstraint());
}
} else if (qs instanceof QueryCollectionPathExpression) {
QueryCollectionPathExpression qcpe = (QueryCollectionPathExpression) qs;
//if (qcpe.getSelect().isEmpty() && qcpe.getFrom().isEmpty()
// && oldSelect.contains(qcpe)) {
if (oldSelect.contains(qcpe)) {
QueryClass firstQc = qcpe.getDefaultClass();
qf = new QueryField(firstQc, fieldName);
subQ.addFrom(firstQc);
subQ.addToSelect(firstQc);
QueryClass rootQc = qcpe.getQueryClass();
try {
QueryHelper.addAndConstraint(subQ, new ContainsConstraint(
new QueryCollectionReference(rootQc, qcpe.getFieldName()),
ConstraintOp.CONTAINS, firstQc));
} catch (IllegalArgumentException e) {
QueryHelper.addAndConstraint(subQ, new ContainsConstraint(
new QueryObjectReference(rootQc, qcpe.getFieldName()),
ConstraintOp.CONTAINS, firstQc));
}
for (FromElement extraQc : qcpe.getFrom()) {
if (extraQc instanceof QueryClass) {
subQ.addFrom(extraQc);
subQ.addToSelect((QueryClass) extraQc);
} else {
throw new IllegalArgumentException("FromElement is not a "
+ "QueryClass: " + extraQc);
}
}
if (qcpe.getConstraint() != null) {
QueryHelper.addAndConstraint(subQ, qcpe.getConstraint());
}
} else {
throw new IllegalArgumentException("QueryCollectionPathExpression is too"
+ " complicated to summarise");
}
} else {
throw new IllegalArgumentException("Error - path " + prefix + " resolves to"
+ " unknown object " + qs);
}
}
} catch (IllegalArgumentException e) {
for (QuerySelectable qs : oldSelect) {
try {
if ((qs instanceof PathExpressionField)
&& (((PathExpressionField) qs).getFieldNumber() == 0)) {
QueryObjectPathExpression qope = ((PathExpressionField) qs).getQope();
Query tempSubQ = QueryCloner.cloneQuery(subQ);
QueryClass lastQc = qope.getDefaultClass();
tempSubQ.addFrom(lastQc);
tempSubQ.addToSelect(lastQc);
QueryClass rootQc = qope.getQueryClass();
QueryHelper.addAndConstraint(tempSubQ, new ContainsConstraint(
new QueryObjectReference(rootQc, qope.getFieldName()),
ConstraintOp.CONTAINS, lastQc));
if (qope.getConstraint() != null) {
QueryHelper.addAndConstraint(tempSubQ, qope.getConstraint());
}
return recursiveMakeSummaryQuery(origPathToQueryNode, summaryPath, tempSubQ,
new HashSet<QuerySelectable>(qope.getSelect()), pathToQueryNode,
occurancesOnly);
} else if (qs instanceof QueryCollectionPathExpression) {
QueryCollectionPathExpression qcpe = (QueryCollectionPathExpression) qs;
QueryClass firstQc = qcpe.getDefaultClass();
Query tempSubQ = QueryCloner.cloneQuery(subQ);
tempSubQ.addFrom(firstQc);
tempSubQ.addToSelect(firstQc);
QueryClass rootQc = qcpe.getQueryClass();
try {
QueryHelper.addAndConstraint(tempSubQ, new ContainsConstraint(
new QueryCollectionReference(rootQc, qcpe.getFieldName()),
ConstraintOp.CONTAINS, firstQc));
} catch (IllegalArgumentException e2) {
QueryHelper.addAndConstraint(tempSubQ, new ContainsConstraint(
new QueryObjectReference(rootQc, qcpe.getFieldName()),
ConstraintOp.CONTAINS, firstQc));
}
for (FromElement extraQc : qcpe.getFrom()) {
if (extraQc instanceof QueryClass) {
tempSubQ.addFrom(extraQc);
tempSubQ.addToSelect((QueryClass) extraQc);
} else {
throw new IllegalArgumentException("FromElement is not a "
+ "QueryClass: " + extraQc);
}
}
if (qcpe.getConstraint() != null) {
QueryHelper.addAndConstraint(tempSubQ, qcpe.getConstraint());
}
return recursiveMakeSummaryQuery(origPathToQueryNode, summaryPath, tempSubQ,
new HashSet<QuerySelectable>(qcpe.getSelect()), pathToQueryNode,
occurancesOnly);
}
} catch (IllegalArgumentException e2) {
// Ignore it - we are searching for a working branch of the query
}
}
throw new IllegalArgumentException(
"Cannot find path (" + summaryPath + ") in query", e);
}
Query q = new Query();
q.addFrom(subQ);
subQ.addToSelect(qf);
qf = new QueryField(subQ, qf);
Class<?> summaryType = qf.getType();
QueryField origQf = (QueryField) origPathToQueryNode.get(summaryPath);
String fieldName = origQf.getFieldName();
String className = Util.getFriendlyName(((QueryClass) origQf.getFromElement())
.getType());
if (!occurancesOnly && isNumeric(summaryType)
&& (!SummaryConfig.summariseAsOccurrences(className + "." + fieldName))) {
return getHistogram(subQ, qf, pathToQueryNode);
} else if ((summaryType == String.class) || (summaryType == Boolean.class)
|| (summaryType == Long.class) || (summaryType == Integer.class)
|| (summaryType == Short.class) || (summaryType == Byte.class)
|| (summaryType == Float.class) || (summaryType == Double.class)
|| (summaryType == BigDecimal.class)) {
q.addToSelect(qf);
q.addToGroupBy(qf);
QueryNode count = new QueryFunction();
q.addToSelect(count);
pathToQueryNode.put(summaryPath, qf);
pathToQueryNode.put("Occurrences", count);
q.addToOrderBy(new OrderDescending(count));
} else {
// Probably Date
throw new IllegalArgumentException("Cannot summarise this column");
}
return q;
}
private static boolean isNumeric(Class<?> summaryType) {
return (summaryType == Long.class) || (summaryType == Integer.class)
|| (summaryType == Short.class) || (summaryType == Byte.class)
|| (summaryType == Float.class) || (summaryType == Double.class)
|| (summaryType == BigDecimal.class);
}
/**
* Produce a histogram query for a numerical column.
*
* In addition to the bucket number and the count for each bucket, each row also includes
* the general statistics previously supplied for backwards compatibility.
*
* BASIC IDEA:
* <pre>
     * select bq.max, bq.min, sum(bq.c) as total, bq.bucket from (
     *   select count(*) as c,
     *     vals.value as val,
     *     width_bucket(vals.value, stats.min, (stats.max * 1.01), 10) as bucket,
     *     stats.max as max,
     *     stats.min as min
* from (select v.value from values as v) as vals,
* (select max(v.value) as max, min(v.value) as min from values as v) as stats
* group by vals.value, stats.min, stats.max order by bucket, vals.value
* ) as bq
* group by bq.bucket, bq.max, bq.min
* order by bq.bucket;
* </pre>
*
     * @param source The source of the data.
* @param qf The field that contains the numerical information we are interested in.
* @param pathToQueryNode The map to update with names of columns.
* @return A query that when run will return a result set where each row has a bin number
* where 1 <= binNumber <= configuredMaxNoOfBins and a number of items in the data
* set that belong in the given bin.
*/
private static Query getHistogram(
Query source,
QueryField qf,
Map<String, QuerySelectable> pathToQueryNode) {
// Inner 1
Query vq = new Query();
vq.addFrom(source);
vq.addToSelect(qf);
vq.setDistinct(false);
// Inner 2
Query statsq = new Query();
statsq.addFrom(source);
QueryFunction min = new QueryFunction(qf, QueryFunction.MIN);
QueryFunction max = new QueryFunction(qf, QueryFunction.MAX);
QueryFunction avg = new QueryFunction(qf, QueryFunction.AVERAGE);
QueryFunction stddev = new QueryFunction(qf, QueryFunction.STDDEV);
QueryEvaluable bins = new QueryValue(SummaryConfig.getNumberOfBins());
Class<?> summaryType = qf.getType();
if (summaryType == Long.class || summaryType == Integer.class) {
bins = new QueryExpression(
bins, QueryExpression.LEAST,
new QueryExpression(max, QueryExpression.SUBTRACT, min)
);
}
statsq.addToSelect(min);
statsq.addToSelect(max);
statsq.addToSelect(avg);
statsq.addToSelect(stddev);
statsq.addToSelect(bins);
// Inner 3
Query bucketq = new Query();
bucketq.setDistinct(false);
QueryFunction count = new QueryFunction();
QueryField val = new QueryField(vq, qf);
QueryField maxval = new QueryField(statsq, max);
QueryField minval = new QueryField(statsq, min);
QueryField meanval = new QueryField(statsq, avg);
QueryField devval = new QueryField(statsq, stddev);
QueryExpression upperBound = new QueryExpression(
new QueryCast(maxval, BigDecimal.class),
QueryExpression.MULTIPLY,
new QueryCast(new QueryValue(new Double(1.01)), BigDecimal.class));
QueryField noOfBuckets = new QueryField(statsq, bins);
QueryFunction bucket = new WidthBucketFunction(val, minval, upperBound, noOfBuckets);
bucketq.addFrom(vq);
bucketq.addFrom(statsq);
bucketq.addToSelect(count);
bucketq.addToSelect(val);
bucketq.addToSelect(maxval);
bucketq.addToSelect(minval);
bucketq.addToSelect(meanval);
bucketq.addToSelect(devval);
bucketq.addToSelect(bucket);
bucketq.addToSelect(noOfBuckets);
bucketq.addToGroupBy(val);
bucketq.addToGroupBy(maxval);
bucketq.addToGroupBy(minval);
bucketq.addToGroupBy(meanval);
bucketq.addToGroupBy(devval);
bucketq.addToGroupBy(noOfBuckets);
bucketq.addToOrderBy(bucket);
bucketq.addToOrderBy(val);
// Outer
Query q = new Query();
QueryField bmax = new QueryField(bucketq, maxval);
QueryField bmin = new QueryField(bucketq, minval);
QueryField bmean = new QueryField(bucketq, meanval);
QueryField bdev = new QueryField(bucketq, devval);
QueryField bbucket = new QueryField(bucketq, bucket);
QueryFunction bucketTotal = new QueryFunction(
new QueryField(bucketq, count), QueryFunction.SUM);
QueryField buckets = new QueryField(bucketq, noOfBuckets);
q.addFrom(bucketq);
q.addToSelect(bmin);
q.addToSelect(bmax);
q.addToSelect(bmean);
q.addToSelect(bdev);
q.addToSelect(buckets);
q.addToSelect(bbucket);
q.addToSelect(bucketTotal);
q.addToGroupBy(bmin);
q.addToGroupBy(bmax);
q.addToGroupBy(bmean);
q.addToGroupBy(bdev);
q.addToGroupBy(bbucket);
q.addToGroupBy(buckets);
q.addToOrderBy(bbucket);
pathToQueryNode.put("Minimum", bmin);
pathToQueryNode.put("Maximum", bmax);
pathToQueryNode.put("Average", bmean);
pathToQueryNode.put("Standard Deviation", bdev);
pathToQueryNode.put("Buckets", bucketTotal);
pathToQueryNode.put("Bucket", bbucket);
pathToQueryNode.put("Occurances", bucketTotal);
return q;
}
/**
* @param props properties to configure the range queries
*/
public static void loadHelpers(Properties props) {
RangeConfig.loadHelpers(props);
}
// Allow collections with stable orderings by class name.
private static final class ClassNameComparator implements Comparator<Class<?>>
{
@Override
public int compare(Class<?> o1, Class<?> o2) {
return o1.getName().compareTo(o2.getName());
}
}
/**
* @author Alex
*/
protected static final class RangeConfig
{
private RangeConfig() {
// Restricted constructor.
}
protected static Map<Class<?>, RangeHelper> rangeHelpers;
static {
init();
}
/**
         * Re-initialise the range helper configuration from the current properties.
*/
protected static void reset() {
init();
}
private static void init() {
rangeHelpers = new HashMap<Class<?>, RangeHelper>();
// Default basic helpers.
// rangeHelpers.put(int.class, new IntHelper());
// rangeHelpers.put(Integer.class, new IntHelper());
// rangeHelpers.put(String.class, new StringHelper());
loadHelpers(PropertiesUtil.getProperties());
}
/**
         * @param allProps properties which may contain pathquery.range.* helper definitions
*/
@SuppressWarnings("unchecked")
protected static void loadHelpers(Properties allProps) {
Properties props = PropertiesUtil.getPropertiesStartingWith("pathquery.range.",
allProps);
for (String key: props.stringPropertyNames()) {
String[] parts = key.split("\\.", 3);
if (parts.length != 3) {
throw new IllegalStateException(
"Property names must be in the format "
+ "pathquery.range.${FullyQualifiedClassName}, got '" + key + "'"
);
}
String targetTypeName = parts[2];
Class<?> targetType;
try {
targetType = Class.forName(targetTypeName);
} catch (ClassNotFoundException e) {
throw new RuntimeException("Cannot find class named in config: '" + key
+ "'", e);
}
String helperName = props.getProperty(key);
Class<RangeHelper> helperType;
try {
helperType = (Class<RangeHelper>) Class.forName(helperName);
} catch (ClassNotFoundException e) {
                    throw new RuntimeException("Cannot find class named in config: '"
                            + helperName + "'", e);
}
RangeHelper helper;
try {
helper = helperType.newInstance();
} catch (InstantiationException e) {
throw new RuntimeException("Could not instantiate range helper for '" + key
+ "'", e);
} catch (IllegalAccessException e) {
throw new RuntimeException("Could not instantiate range helper for '" + key
+ "'", e);
}
rangeHelpers.put(targetType, helper);
LOG.info("ADDED RANGE HELPER FOR " + targetType + " (" + helperType.getName()
+ ")");
}
}
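        // Illustration of the expected property format (helper class name illustrative):
        //   pathquery.range.org.intermine.model.bio.Location = \
        //       org.intermine.bio.query.range.ChromosomeLocationHelper
        // which registers that helper for range constraints on Location-typed paths.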
/**
         * @param type the class of the constrained path's end type
         * @return true if there is a helper registered for this class of object
*/
public static boolean hasHelperForType(Class<?> type) {
return rangeHelpers.containsKey(type);
}
/**
         * @param type the class to find a helper for
         * @return the helper registered for the given type, or null if there is none
*/
public static RangeHelper getHelper(Class<?> type) {
return rangeHelpers.get(type);
}
}
/**
* @return set of classes that are legal to use with range constraints
*/
public static Set<Class<?>> getValidRangeTargets() {
return RangeConfig.rangeHelpers.keySet();
}
/**
* Controls access to configuration information on which fields should be summarised as a count
* of occurrences.
*
* @author Matthew Wakeling
*/
protected static final class SummaryConfig
{
private SummaryConfig() {
}
private static Set<String> config;
static {
config = new HashSet<String>();
String stringConfig = PropertiesUtil.getProperties()
.getProperty("querySummary.summariseAsOccurrences");
if (stringConfig != null) {
String[] stringConfigs = stringConfig.split(",");
for (String configEntry : stringConfigs) {
configEntry = configEntry.trim();
if (configEntry.contains(" ")) {
throw new IllegalArgumentException("querySummary.summariseAsOccurrences "
+ "property contains an entry with a space: \"" + configEntry
+ "\". Entries should be comma-separated.");
}
config.add(configEntry);
}
}
}
/**
* Returns whether the given field name is configured to be summarised as a count of
* occurrences.
*
* @param fieldName a class name, a dot, and a field name
* @return true if the field should be summarised as a count of occurrences, false for
* a mean and standard deviation.
*/
public static boolean summariseAsOccurrences(String fieldName) {
return config.contains(fieldName);
}
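        // Illustration of the expected property format (field names illustrative):
        //   querySummary.summariseAsOccurrences = Gene.length,Protein.molecularWeight
        // which makes those numeric columns summarise as occurrence counts instead of a
        // histogram with mean and standard deviation.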
/**
* Returns the number of bins to split a histogram into.
* @return The number of bins.
*/
public static Integer getNumberOfBins() {
return Integer.valueOf(
PropertiesUtil.getProperties().getProperty("querySummary.no-of-bins", "20"));
}
}
/**
     * @param q the query the constraint will be added to
     * @param node the QueryNode representing the constrained field
     * @param con the range constraint
* @return range constraint
*/
public static Constraint makeRangeConstraint(
Queryable q,
QueryNode node,
PathConstraintRange con) {
Class<?> type = node.getType();
if (RangeConfig.hasHelperForType(type)) {
RangeHelper helper = RangeConfig.getHelper(type);
return helper.createConstraint(q, node, con);
}
throw new RuntimeException("No range constraints are possible for paths of type "
+ type.getName());
}
}
| intermine/api/main/src/org/intermine/api/query/MainHelper.java | package org.intermine.api.query;
/*
* Copyright (C) 2002-2016 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.TimeZone;
import java.util.TreeSet;
import org.apache.log4j.Logger;
import org.intermine.InterMineException;
import org.intermine.api.bag.BagQueryConfig;
import org.intermine.api.bag.BagQueryResult;
import org.intermine.api.bag.BagQueryRunner;
import org.intermine.api.profile.InterMineBag;
import org.intermine.api.profile.ProfileManager;
import org.intermine.api.template.TemplateManager;
import org.intermine.metadata.ClassDescriptor;
import org.intermine.metadata.ConstraintOp;
import org.intermine.metadata.FieldDescriptor;
import org.intermine.metadata.Model;
import org.intermine.metadata.TypeUtil;
import org.intermine.metadata.Util;
import org.intermine.objectstore.ObjectStore;
import org.intermine.objectstore.ObjectStoreException;
import org.intermine.objectstore.query.BagConstraint;
import org.intermine.objectstore.query.ClassConstraint;
import org.intermine.objectstore.query.Constraint;
import org.intermine.objectstore.query.ConstraintSet;
import org.intermine.objectstore.query.ContainsConstraint;
import org.intermine.objectstore.query.FromElement;
import org.intermine.objectstore.query.OrderDescending;
import org.intermine.objectstore.query.PathExpressionField;
import org.intermine.objectstore.query.Query;
import org.intermine.objectstore.query.QueryCast;
import org.intermine.objectstore.query.QueryClass;
import org.intermine.objectstore.query.QueryCloner;
import org.intermine.objectstore.query.QueryCollectionPathExpression;
import org.intermine.objectstore.query.QueryCollectionReference;
import org.intermine.objectstore.query.QueryEvaluable;
import org.intermine.objectstore.query.QueryExpression;
import org.intermine.objectstore.query.QueryField;
import org.intermine.objectstore.query.QueryFunction;
import org.intermine.objectstore.query.QueryHelper;
import org.intermine.objectstore.query.QueryNode;
import org.intermine.objectstore.query.QueryObjectPathExpression;
import org.intermine.objectstore.query.QueryObjectReference;
import org.intermine.objectstore.query.QueryPathExpression;
import org.intermine.objectstore.query.QueryPathExpressionWithSelect;
import org.intermine.objectstore.query.QuerySelectable;
import org.intermine.objectstore.query.QueryValue;
import org.intermine.objectstore.query.Queryable;
import org.intermine.objectstore.query.SimpleConstraint;
import org.intermine.objectstore.query.WidthBucketFunction;
import org.intermine.pathquery.LogicExpression;
import org.intermine.pathquery.OrderDirection;
import org.intermine.pathquery.OrderElement;
import org.intermine.pathquery.OuterJoinStatus;
import org.intermine.pathquery.Path;
import org.intermine.pathquery.PathConstraint;
import org.intermine.pathquery.PathConstraintAttribute;
import org.intermine.pathquery.PathConstraintBag;
import org.intermine.pathquery.PathConstraintIds;
import org.intermine.pathquery.PathConstraintLookup;
import org.intermine.pathquery.PathConstraintLoop;
import org.intermine.pathquery.PathConstraintMultiValue;
import org.intermine.pathquery.PathConstraintMultitype;
import org.intermine.pathquery.PathConstraintNull;
import org.intermine.pathquery.PathConstraintRange;
import org.intermine.pathquery.PathConstraintSubclass;
import org.intermine.pathquery.PathException;
import org.intermine.pathquery.PathQuery;
import org.intermine.util.PropertiesUtil;
/**
* Helper methods for main controller and main action
* @author Mark Woodbridge
* @author Thomas Riley
* @author Matthew Wakeling
*/
public final class MainHelper
{
private MainHelper() {
}
private static final Logger LOG = Logger.getLogger(MainHelper.class);
private static final LookupTokeniser LOOKUP_TOKENISER = LookupTokeniser.getLookupTokeniser();
/**
* Converts a PathQuery object into an ObjectStore Query object, and optionally populates a Map
* from String path in the PathQuery to the object in the Query that represents it.
*
* @param pathQuery the PathQuery
* @param savedBags the current saved bags map (a Map from bag name to InterMineBag)
* @param pathToQueryNode optional parameter which will be populated with entries, mapping from
* String path in the pathQuery to objects in the result Query
* @param bagQueryRunner a BagQueryRunner to use to perform LOOKUPs
* @param returnBagQueryResults optional parameter in which any BagQueryResult objects can be
* returned
* @return an ObjectStore Query object
* @throws ObjectStoreException if something goes wrong
*/
public static Query makeQuery(PathQuery pathQuery, Map<String, InterMineBag> savedBags,
Map<String, QuerySelectable> pathToQueryNode, BagQueryRunner bagQueryRunner,
Map<String, BagQueryResult> returnBagQueryResults) throws ObjectStoreException {
synchronized (pathQuery) {
List<String> problems = pathQuery.verifyQuery();
if (!problems.isEmpty()) {
throw new ObjectStoreException("PathQuery is invalid: " + problems);
}
Query q = new Query();
try {
makeQuery(q, pathQuery.getRootClass(), pathQuery, savedBags, pathToQueryNode,
bagQueryRunner, returnBagQueryResults);
} catch (PathException e) {
throw new Error("PathQuery is invalid, but was valid earlier", e);
}
return q;
}
}
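    // A minimal usage sketch for the method above, assuming a verified PathQuery and a
    // BagQueryRunner are already in hand (variable names illustrative):
    //   Map<String, QuerySelectable> pathToQueryNode = new HashMap<String, QuerySelectable>();
    //   Query q = MainHelper.makeQuery(pathQuery, savedBags, pathToQueryNode,
    //           bagQueryRunner, new HashMap<String, BagQueryResult>());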
/**
* Converts a PathQuery object into an ObjectStore Query object, and optionally populates a Map
* from String path in the PathQuery to the object in the Query that represents it. This is the
* recursive private method that performs the algorithm.
*
* @param q a Query, QueryObjectPathExpression, or QueryCollectionPathExpression, depending on
* the level of recursion reached so far
* @param root the path representing the level of recursion - we will process this outer join
* group
* @param query the PathQuery
* @param savedBags the current saved bags map (a Map from bag name to InterMineBag)
* @param pathToQueryNode optional parameter which will be populated with entries, mapping from
* String path in the pathQuery to objects in the result Query
* @param bagQueryRunner a BagQueryRunner to use to perform LOOKUPs
* @param returnBagQueryResults optional parameter in which any BagQueryResult objects can be
* returned
* @throws ObjectStoreException if something goes wrong
*/
private static void makeQuery(Queryable q, String root, PathQuery query,
Map<String, InterMineBag> savedBags, Map<String, QuerySelectable> pathToQueryNode,
BagQueryRunner bagQueryRunner,
Map<String, BagQueryResult> returnBagQueryResults) throws ObjectStoreException {
PathQuery pathQuery = query;
Model model = pathQuery.getModel();
// We need to call getQueryToExecute() first. For template queries this gets a query that
// excludes any optional constraints that have been switched off. A normal PathQuery is
// unchanged.
pathQuery = pathQuery.getQueryToExecute();
try {
// This is the root constraint set that will be set in the query
ConstraintSet andCs = new ConstraintSet(ConstraintOp.AND);
// This is the Map that stores what we will put in pathToQueryNode. Because we can't
// trust what is in there already, we use a separate variable and copy across afterwards
Map<String, QuerySelectable> queryBits = new HashMap<String, QuerySelectable>();
// If we have recursed, and are operating on a PathExpression, then we need to extract
// the default class which was set up in the parent group and add it to the queryBits
if (q instanceof QueryObjectPathExpression) {
queryBits.put(root, ((QueryObjectPathExpression) q).getDefaultClass());
} else if (q instanceof QueryCollectionPathExpression) {
queryBits.put(root, ((QueryCollectionPathExpression) q).getDefaultClass());
}
// This is a Map from main path to outer join group of all classes in the query
Map<String, String> outerJoinGroups = pathQuery.getOuterJoinGroups();
// This is the subclass map from the query, for creating Path objects
Map<String, String> subclasses = pathQuery.getSubclasses();
// Get the logic expression for the relevant outer join group, and the list of
// relevant constraint codes
Set<String> relevantCodes = pathQuery.getConstraintGroups().get(root);
LogicExpression logic = pathQuery.getConstraintLogicForGroup(root);
logic = handleNullOuterJoins(root, pathQuery, model, relevantCodes,
logic);
// This is the set of loop constraints that participate in the class collapsing
// mechanism. All others must have a ClassConstraint generated for them.
Set<PathConstraintLoop> participatingLoops = findParticipatingLoops(logic, pathQuery
.getConstraints());
// This is the map of EQUALS loop constraints, from the path that should be omitted
// from the Query to the path that represents both paths.
Map<String, String> loops = makeLoopsMap(participatingLoops);
// Get any paths in the query that are constrained to be NULL/NOT NULL references or
// collections AND don't appear in other constraints or the query view. These will only
            // be accessed in an EXISTS subquery and shouldn't be added to the FROM.
Map<String, String> pathConstraintNullOnly =
getPathConstraintNulls(model, pathQuery, true);
// Set up queue system. We don't know what order we want to process these entries in,
// so a queue allows us to put one we can't process yet to the back of the queue to
// process later
LinkedList<String> queue = new LinkedList<String>();
for (String path : outerJoinGroups.keySet()) {
queue.addLast(path);
}
Map<String, String> deferralReasons = new HashMap<String, String>();
int queueDeferred = 0;
// This is a Map of PathExpression objects that have been created. They will be added to
// the SELECT list later on, when we can determine the correct order in the SELECT list.
Map<String, QueryPathExpressionWithSelect> pathExpressions
= new HashMap<String, QueryPathExpressionWithSelect>();
while (!queue.isEmpty()) {
if (queueDeferred > queue.size() + 2) {
throw new IllegalArgumentException("Cannot handle entries in queue: " + queue
+ ", reasons: " + deferralReasons + ", root = " + root);
}
String stringPath = queue.removeFirst();
deferralReasons.remove(stringPath);
Path path = new Path(model, stringPath, subclasses);
String outerJoinGroup = outerJoinGroups.get(stringPath);
if (path.isRootPath()) {
// This is the root path. Just add the QueryClass, no further action.
if (root.equals(outerJoinGroup)) {
// This class is relevant to this outer join group
QueryClass qc = new QueryClass(path.getEndType());
((Query) q).addFrom(qc);
queryBits.put(stringPath, qc);
}
} else if (stringPath.equals(root)) {
// We are on the root of an outer join. No action required
} else {
String parent = path.getPrefix().getNoConstraintsString();
QueryClass parentQc = (QueryClass) ((queryBits.get(parent)
instanceof QueryClass) ? queryBits.get(parent) : null);
if (parentQc == null) {
if (root.equals(outerJoinGroups.get(parent))) {
// We cannot process this path yet. It depends on a parent that hasn't
// been processed yet. Put it to the back of the queue.
deferralReasons.put(stringPath, "Could not process path " + stringPath
+ " because its parent has not yet been processed");
queue.addLast(stringPath);
queueDeferred++;
continue;
}
} else {
if (root.equals(outerJoinGroup)) {
// This class is relevant to this outer join group
QueryClass qc;
if (loops.containsKey(stringPath)) {
// This path is looped on another path
qc = (QueryClass) queryBits.get(loops.get(stringPath));
if (qc == null) {
deferralReasons.put(stringPath, "Could not process path "
+ stringPath + " because it is looped onto a class ("
+ loops.get(stringPath) + ") that has not been "
+ "processed yet");
queue.addLast(stringPath);
queueDeferred++;
continue;
}
} else {
qc = new QueryClass(path.getEndType());
if (!pathConstraintNullOnly.containsKey(path.toString())) {
if (q instanceof Query) {
((Query) q).addFrom(qc);
} else {
((QueryCollectionPathExpression) q).addFrom(qc);
}
}
}
// unless there is ONLY a null constraint on this ref/col path we need
// to add a contains constraint to make the join
if (!pathConstraintNullOnly.containsKey(stringPath)) {
if (path.endIsReference()) {
andCs.addConstraint(new ContainsConstraint(
new QueryObjectReference(parentQc,
path.getLastElement()), ConstraintOp.CONTAINS,
qc));
} else {
andCs.addConstraint(new ContainsConstraint(
new QueryCollectionReference(parentQc,
path.getLastElement()), ConstraintOp.CONTAINS,
qc));
}
}
queryBits.put(stringPath, qc);
} else {
// This is a path from another outer join group. We only need to act if
// the parent path is from this outer join group - in that case, we
// make a PathExpression and recurse
if (root.equals(outerJoinGroups.get(parent))) {
// We need to act. However, first we need to know whether to use a
// collection or reference path expression
boolean isCollection = path.endIsCollection();
// Even if this is false, we may still need to upgrade to collection
// if there are multiple paths in the outer join group
if (!isCollection) {
int groupSize = 0;
for (Map.Entry<String, String> entry
: outerJoinGroups.entrySet()) {
if (outerJoinGroup.equals(entry.getValue())) {
groupSize++;
}
}
if (groupSize > 1) {
isCollection = true;
}
}
if (isCollection) {
QueryCollectionPathExpression qn
= new QueryCollectionPathExpression(parentQc,
path.getLastElement(), path.getEndType());
makeQuery(qn, stringPath, pathQuery, savedBags,
pathToQueryNode, bagQueryRunner, returnBagQueryResults);
queryBits.put(stringPath, qn);
pathExpressions.put(stringPath, qn);
} else {
QueryObjectPathExpression qn
= new QueryObjectPathExpression(parentQc,
path.getLastElement(), path.getEndType());
makeQuery(qn, stringPath, pathQuery, savedBags, pathToQueryNode,
bagQueryRunner, returnBagQueryResults);
queryBits.put(stringPath, qn);
pathExpressions.put(stringPath, qn);
}
}
}
}
}
deferralReasons.remove(stringPath);
queueDeferred = 0;
}
Map<String, Constraint> codeToConstraint = putConstraintsInMap(q,
savedBags, bagQueryRunner, returnBagQueryResults,
pathQuery, model, queryBits, subclasses, relevantCodes,
participatingLoops);
// Use the constraint logic to create a ConstraintSet structure with the constraints
// inserted into it
createConstraintStructure(logic, andCs, codeToConstraint);
setConstraints(q, andCs);
List<QuerySelectable> select = generateSelectList(root, pathQuery, model, queryBits,
outerJoinGroups, subclasses, pathExpressions);
copySelectList(q, select);
generateOrderBy(q, pathQuery, model, queryBits, subclasses);
if (pathToQueryNode != null) {
pathToQueryNode.putAll(queryBits);
}
} catch (PathException e) {
throw new ObjectStoreException("PathException while converting PathQuery to ObjectStore"
+ " Query", e);
}
}
private static LogicExpression handleNullOuterJoins(String root,
PathQuery pathQuery, Model model, Set<String> relevantCodes,
LogicExpression logicExpression) {
LogicExpression logic = logicExpression;
// This is complicated - for NULL/NOT NULL constraints on refs/cols that span an outer
// join boundary we need the constraint to be on the left side of the boundary, i.e.
// in the main part of the query rather than subquery on the select. We may need to
// move a constraint code from another outer join group.
// e.g. Company.departments IS_NOT_NULL and Company.departments is an outer join
Map<String, String> nullRefColConstraints = getPathConstraintNulls(model, pathQuery,
false);
for (String constraintPath : nullRefColConstraints.keySet()) {
OuterJoinStatus ojs = pathQuery.getOuterJoinStatus(constraintPath);
if (ojs == OuterJoinStatus.OUTER) {
// which side of outer join are we on?
if (root.split("\\.").length < constraintPath.split("\\.").length) {
// we're on the left side of the outer join so we want to add this
// constraint to the relevant codes now
String code = nullRefColConstraints.get(constraintPath);
if (!relevantCodes.contains(code)) {
relevantCodes.add(code);
logic = addToConstraintLogic(logic, code);
}
} else {
// we've recursed into an outer join so we don't want to process this
// constraint now, remove it if it's in the relevant codes
String code = nullRefColConstraints.get(constraintPath);
if (relevantCodes.contains(code)) {
relevantCodes.remove(code);
logic = removeFromConstraintLogic(logic, code);
}
}
}
}
return logic;
}
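    // Illustration of handleNullOuterJoins above, continuing the Company.departments example:
    // if Company.departments is an outer join carrying an IS_NOT_NULL constraint with code B,
    // then while processing the root group ("Company") B is pulled into the relevant codes,
    // and while recursing into the "Company.departments" group B is removed again, so the
    // constraint is evaluated on the left-hand side of the join boundary.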
private static void setConstraints(Queryable q, ConstraintSet andCs) {
if (!andCs.getConstraints().isEmpty()) {
Constraint c = andCs;
while ((c instanceof ConstraintSet)
&& (((ConstraintSet) c).getConstraints().size() == 1)) {
c = ((ConstraintSet) c).getConstraints().iterator().next();
}
q.setConstraint(c);
}
}
private static void copySelectList(Queryable q, List<QuerySelectable> select) {
// Copy select list into query:
QueryClass defaultClass = null;
if (q instanceof QueryObjectPathExpression) {
defaultClass = ((QueryObjectPathExpression) q).getDefaultClass();
}
if ((select.size() == 1) && select.get(0).equals(defaultClass)) {
// Don't add anything to the SELECT list - default is fine
} else {
for (QuerySelectable qs : select) {
if (qs instanceof QueryObjectPathExpression) {
QueryObjectPathExpression qope = (QueryObjectPathExpression) qs;
if (qope.getSelect().size() > 1) {
for (int i = 0; i < qope.getSelect().size(); i++) {
q.addToSelect(new PathExpressionField(qope, i));
}
} else {
q.addToSelect(qope);
}
} else {
q.addToSelect(qs);
}
}
}
}
private static void generateOrderBy(Queryable q, PathQuery pathQuery,
Model model, Map<String, QuerySelectable> queryBits,
Map<String, String> subclasses) throws PathException {
if (q instanceof Query) {
Query qu = (Query) q;
for (OrderElement order : pathQuery.getOrderBy()) {
QueryField qf = (QueryField) queryBits.get(order.getOrderPath());
if (qf == null) {
Path path = new Path(model, order.getOrderPath(), subclasses);
QueryClass qc = (QueryClass) queryBits.get(path.getPrefix()
.getNoConstraintsString());
qf = new QueryField(qc, path.getLastElement());
queryBits.put(order.getOrderPath(), qf);
}
if ((!qu.getOrderBy().contains(qf)) && (!qu.getOrderBy()
.contains(new OrderDescending(qf)))) {
if (order.getDirection().equals(OrderDirection.DESC)) {
qu.addToOrderBy(new OrderDescending(qf));
} else {
qu.addToOrderBy(qf);
}
}
}
for (String view : pathQuery.getView()) {
QueryField qf = (QueryField) queryBits.get(view);
if (qf != null) {
// If qf IS null, that means it is in another outer join group, as we have
// populated queryBits earlier with all view objects
if ((!qu.getOrderBy().contains(qf)) && (!qu.getOrderBy()
.contains(new OrderDescending(qf)))) {
qu.addToOrderBy(qf);
}
}
}
}
}
// Generate the SELECT list
private static List<QuerySelectable> generateSelectList(String root,
PathQuery pathQuery, Model model,
Map<String, QuerySelectable> queryBits,
Map<String, String> outerJoinGroups,
Map<String, String> subclasses,
Map<String, QueryPathExpressionWithSelect> pathExpressions)
throws PathException {
HashSet<String> pathExpressionsDone = new HashSet<String>();
List<QuerySelectable> select = new ArrayList<QuerySelectable>();
for (String view : pathQuery.getView()) {
Path path = new Path(model, view, subclasses);
String parentPath = path.getPrefix().getNoConstraintsString();
String outerJoinGroup = outerJoinGroups.get(parentPath);
if (root.equals(outerJoinGroup)) {
QueryClass qc = (QueryClass) queryBits.get(parentPath);
QueryField qf = new QueryField(qc, path.getLastElement());
queryBits.put(view, qf);
if (!select.contains(qc)) {
select.add(qc);
}
} else {
while ((!path.isRootPath())
&& (!root.equals(outerJoinGroups.get(path.getPrefix()
.getNoConstraintsString())))) {
path = path.getPrefix();
}
if (!path.isRootPath()) {
// We have found a path in the view that is a path expression we want to
// use
view = path.getNoConstraintsString();
if (!pathExpressionsDone.contains(view)) {
QueryPathExpressionWithSelect pe = pathExpressions.get(view);
QueryClass qc = pe.getQueryClass();
if (!select.contains(qc)) {
select.add(qc);
}
if (!select.contains(pe)) {
select.add(pe);
}
}
}
}
}
return select;
}
private static Map<String, Constraint> putConstraintsInMap(Queryable q,
Map<String, InterMineBag> savedBags, BagQueryRunner bagQueryRunner,
Map<String, BagQueryResult> returnBagQueryResults,
PathQuery pathQuery, Model model,
Map<String, QuerySelectable> queryBits,
Map<String, String> subclasses, Set<String> relevantCodes,
Set<PathConstraintLoop> participatingLoops) throws PathException,
BagNotFound, ObjectStoreException {
// For each of the relevant codes, produce a Constraint object, and put it in a Map.
// Constraints that do not have a code (namely loop NOT EQUALS) can be put straight into
// the andCs.
Map<String, Constraint> codeToConstraint = new HashMap<String, Constraint>();
for (Map.Entry<PathConstraint, String> entry : pathQuery.getConstraints().entrySet()) {
String code = entry.getValue();
if (relevantCodes.contains(code)) {
PathConstraint constraint = entry.getKey();
String stringPath = constraint.getPath();
Path path = new Path(model, stringPath, subclasses);
QuerySelectable field = queryBits.get(constraint.getPath());
if (field == null) {
// This must be a constraint on an attribute, as all the classes will
// already be in querybits
QueryClass qc = (QueryClass) queryBits.get(path.getPrefix()
.getNoConstraintsString());
field = new QueryField(qc, path.getLastElement());
queryBits.put(stringPath, field);
}
if (constraint instanceof PathConstraintAttribute) {
PathConstraintAttribute pca = (PathConstraintAttribute) constraint;
Class<?> fieldType = path.getEndType();
if (String.class.equals(fieldType)) {
codeToConstraint.put(code, makeQueryStringConstraint(
(QueryField) field, pca));
} else if (Date.class.equals(fieldType)) {
codeToConstraint.put(code, makeQueryDateConstraint(
(QueryField) field, pca));
} else {
// Use simple forms of operators when not dealing with strings.
ConstraintOp simpleOp = ConstraintOp.EXACT_MATCH == pca.getOp()
? ConstraintOp.EQUALS
: ConstraintOp.STRICT_NOT_EQUALS == pca.getOp()
? ConstraintOp.NOT_EQUALS : pca.getOp();
codeToConstraint.put(code, new SimpleConstraint((QueryField) field,
simpleOp, new QueryValue(TypeUtil.stringToObject(
fieldType, pca.getValue()))));
}
} else if (constraint instanceof PathConstraintNull) {
if (path.endIsAttribute()) {
codeToConstraint.put(code, new SimpleConstraint((QueryField) field,
constraint.getOp()));
} else {
String parent = path.getPrefix().getNoConstraintsString();
QueryClass parentQc = (QueryClass) ((queryBits.get(parent)
instanceof QueryClass) ? queryBits.get(parent) : null);
if (path.endIsReference()) {
QueryObjectReference qr = new QueryObjectReference(parentQc,
path.getLastElement());
codeToConstraint.put(code, new ContainsConstraint(qr,
constraint.getOp()));
} else { // collection
QueryCollectionReference qr = new QueryCollectionReference(parentQc,
path.getLastElement());
codeToConstraint.put(code, new ContainsConstraint(qr,
constraint.getOp()));
}
}
} else if (constraint instanceof PathConstraintLoop) {
// We need to act if this is not a participating constraint - otherwise
// this has been taken care of above.
if (!participatingLoops.contains(constraint)) {
PathConstraintLoop pcl = (PathConstraintLoop) constraint;
if (pcl.getPath().length() > pcl.getLoopPath().length()) {
codeToConstraint.put(code, new ClassConstraint((QueryClass)
queryBits.get(pcl.getLoopPath()), constraint.getOp(),
(QueryClass) field));
} else {
codeToConstraint.put(code, new ClassConstraint((QueryClass) field,
constraint.getOp(), (QueryClass) queryBits
.get(((PathConstraintLoop) constraint).getLoopPath())));
}
}
} else if (constraint instanceof PathConstraintSubclass) {
// No action needed.
} else if (constraint instanceof PathConstraintBag) {
PathConstraintBag pcb = (PathConstraintBag) constraint;
InterMineBag bag = savedBags.get(pcb.getBag());
if (bag == null) {
throw new BagNotFound(pcb.getBag());
}
codeToConstraint.put(code, new BagConstraint((QueryNode) field, pcb.getOp(),
bag.getOsb()));
} else if (constraint instanceof PathConstraintIds) {
codeToConstraint.put(code, new BagConstraint(new QueryField(
(QueryClass) field, "id"), constraint.getOp(),
((PathConstraintIds) constraint).getIds()));
} else if (constraint instanceof PathConstraintRange) {
PathConstraintRange pcr = (PathConstraintRange) constraint;
codeToConstraint.put(code, makeRangeConstraint(q, (QueryNode) field, pcr));
} else if (constraint instanceof PathConstraintMultitype) {
PathConstraintMultitype pcmt = (PathConstraintMultitype) constraint;
codeToConstraint.put(code, makeMultiTypeConstraint(pathQuery.getModel(),
(QueryNode) field, pcmt));
} else if (constraint instanceof PathConstraintMultiValue) {
Class<?> fieldType = path.getEndType();
if (String.class.equals(fieldType)) {
codeToConstraint.put(code, new BagConstraint((QueryField) field,
constraint.getOp(), ((PathConstraintMultiValue) constraint)
.getValues()));
} else {
Collection<Object> objects = new ArrayList<Object>();
for (String s : ((PathConstraintMultiValue) constraint).getValues()) {
objects.add(TypeUtil.stringToObject(fieldType, s));
}
codeToConstraint.put(code, new BagConstraint((QueryField) field,
constraint.getOp(), objects));
}
} else if (constraint instanceof PathConstraintLookup) {
QueryClass qc = (QueryClass) field;
PathConstraintLookup pcl = (PathConstraintLookup) constraint;
if (bagQueryRunner == null) {
throw new NullPointerException("Cannot convert this PathQuery to an "
+ "ObjectStore Query without a BagQueryRunner");
}
String identifiers = pcl.getValue();
BagQueryResult bagQueryResult;
List<String> identifierList = LOOKUP_TOKENISER.tokenise(identifiers);
try {
bagQueryResult = bagQueryRunner.searchForBag(qc.getType()
.getSimpleName(), identifierList, pcl.getExtraValue(), true);
} catch (ClassNotFoundException e) {
throw new ObjectStoreException(e);
} catch (InterMineException e) {
throw new ObjectStoreException(e);
}
codeToConstraint.put(code, new BagConstraint(new QueryField(qc, "id"),
ConstraintOp.IN, bagQueryResult.getMatchAndIssueIds()));
if (returnBagQueryResults != null) {
returnBagQueryResults.put(stringPath, bagQueryResult);
}
} else {
throw new ObjectStoreException("Unknown constraint type "
+ constraint.getClass().getName());
}
}
}
return codeToConstraint;
}
/**
* Construct a new multi-type constraint.
* @param model The model to look for types within.
* @param field The subject of the constraint.
* @param pcmt The constraint itself.
* @return A constraint.
* @throws ObjectStoreException if the constraint names types that are not in the model.
*/
protected static Constraint makeMultiTypeConstraint(
Model model,
QueryNode field,
PathConstraintMultitype pcmt) throws ObjectStoreException {
QueryField typeClass = new QueryField((QueryClass) field, "class");
ConstraintOp op = (pcmt.getOp() == ConstraintOp.ISA)
? ConstraintOp.IN : ConstraintOp.NOT_IN;
Set<Class<?>> classes = new TreeSet<Class<?>>(new ClassNameComparator());
for (String name: pcmt.getValues()) {
ClassDescriptor cd = model.getClassDescriptorByName(name);
            if (cd == null) { // PathQuery validation should already prevent this, but check anyway.
throw new ObjectStoreException(
String.format("%s is not a class in the %s model", name, model.getName()));
}
classes.add(cd.getType());
}
return new BagConstraint(typeClass, op, classes);
}
private static Map<String, String> makeLoopsMap(Collection<PathConstraintLoop> constraints) {
// A PathConstraintLoop should participate in this mechanism if it is an EQUALS constraint,
// and its code is not inside an OR in the constraint logic.
// Let's look at this from an equivalence groups point of view. We need to cope with the
// situation where a = a.b.c and a.d = a.b.c, putting all three into an equivalence group.
// The group name should be the shortest path in the group, or the lowest compareTo() for
// a tie-break.
Map<String, String> membership = new HashMap<String, String>();
Map<String, Set<String>> groups = new HashMap<String, Set<String>>();
for (PathConstraintLoop loop : constraints) {
if (ConstraintOp.EQUALS.equals(loop.getOp())) {
String path1 = loop.getPath();
String path2 = loop.getLoopPath();
if (membership.containsKey(path1)) {
if (membership.containsKey(path2)) {
String existingGroup1 = membership.get(path1);
String existingGroup2 = membership.get(path2);
if (!existingGroup1.equals(existingGroup2)) {
Set<String> members1 = groups.remove(existingGroup1);
Set<String> members2 = groups.remove(existingGroup2);
members1.addAll(members2);
String shorter = shorterPath(existingGroup1, existingGroup2);
for (String toAdd : members1) {
membership.put(toAdd, shorter);
}
groups.put(shorter, members1);
}
} else {
String existingGroup = membership.get(path1);
Set<String> members = groups.remove(existingGroup);
members.add(path2);
String shorter = shorterPath(path2, existingGroup);
for (String toAdd : members) {
membership.put(toAdd, shorter);
}
groups.put(shorter, members);
}
} else {
if (membership.containsKey(path2)) {
String existingGroup = membership.get(path2);
Set<String> members = groups.remove(existingGroup);
members.add(path1);
String shorter = shorterPath(path1, existingGroup);
for (String toAdd : members) {
membership.put(toAdd, shorter);
}
groups.put(shorter, members);
} else {
String shorter = shorterPath(path1, path2);
membership.put(path2, shorter);
membership.put(path1, shorter);
groups.put(shorter, new HashSet<String>(Arrays.asList(path1, path2)));
}
}
}
}
Map<String, String> retval = new HashMap<String, String>();
for (Map.Entry<String, String> entry : membership.entrySet()) {
if (!entry.getKey().equals(entry.getValue())) {
retval.put(entry.getKey(), entry.getValue());
}
}
return retval;
}
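    // Illustrative walk-through of makeLoopsMap (hypothetical paths): given the EQUALS loop
    // constraints "Employee = Employee.department.manager" and
    // "Employee.boss = Employee.department.manager", all three paths fall into one equivalence
    // group named after its shortest member, "Employee". The returned map then contains
    // "Employee.department.manager" -> "Employee" and "Employee.boss" -> "Employee";
    // "Employee" itself is omitted because entries whose key equals their value are dropped.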
// find any reference or collection paths in query that have NULL/NOT NULL constraints.
// If nullOnly then return those that have a NULL/NOT NULL constraint that otherwise don't
// appear in the view or other constraints
private static Map<String, String> getPathConstraintNulls(Model model, PathQuery pq,
boolean nullOnly) {
Map<String, String> nullRefsAndCols = new HashMap<String, String>();
for (Map.Entry<PathConstraint, String> entry : pq.getConstraints().entrySet()) {
PathConstraint constraint = entry.getKey();
String code = entry.getValue();
if (constraint instanceof PathConstraintNull) {
try {
Path constraintPath = new Path(model, constraint.getPath());
if (constraintPath.endIsReference() || constraintPath.endIsCollection()) {
boolean isNullOnly = true;
// look for any view elements starting with this path
for (String viewPath : pq.getView()) {
if (viewPath.startsWith(constraintPath.toString())) {
isNullOnly = false;
}
}
// look for any other constraints starting with this path
for (PathConstraint otherCon : pq.getConstraints().keySet()) {
if (otherCon != constraint
&& otherCon.getPath().startsWith(constraintPath.toString())) {
isNullOnly = false;
}
}
// constraint path wasn't found elsewhere so it's a null collection only
if (nullOnly && isNullOnly) {
nullRefsAndCols.put(constraintPath.toString(), code);
} else if (!nullOnly) {
nullRefsAndCols.put(constraintPath.toString(), code);
}
}
} catch (PathException e) {
// this shouldn't happen because the query is already verified
LOG.warn("Error finding paths constrainted to null only:" + e);
}
}
}
return nullRefsAndCols;
}
private static String shorterPath(String path1, String path2) {
if (path1.length() > path2.length()) {
return path2;
} else if (path2.length() > path1.length()) {
return path1;
} else if (path1.compareTo(path2) > 0) {
return path2;
} else if (path1.compareTo(path2) < 0) {
return path1;
} else {
throw new IllegalArgumentException("Two paths are identical: " + path1);
}
}
/**
* Returns the Set of PathConstraintLoop objects that will participate in the QueryClass
* collapsing mechanism.
*
* @param logic the constraint logic
* @param constraints a Map from PathConstraint to code
* @return a Set of PathConstraintLoop objects
*/
protected static Set<PathConstraintLoop> findParticipatingLoops(LogicExpression logic,
Map<PathConstraint, String> constraints) {
if (logic != null) {
LogicExpression.Node node = logic.getRootNode();
Set<String> codes = new HashSet<String>();
findAndCodes(codes, node);
Set<PathConstraintLoop> retval = new HashSet<PathConstraintLoop>();
for (Map.Entry<PathConstraint, String> entry : constraints.entrySet()) {
if (codes.contains(entry.getValue())) {
if (entry.getKey() instanceof PathConstraintLoop) {
if (ConstraintOp.EQUALS.equals(entry.getKey().getOp())) {
retval.add((PathConstraintLoop) entry.getKey());
}
}
}
}
return retval;
}
return Collections.emptySet();
}
/**
* Finds all the codes in a constraint logic that are ANDed in the given constraint logic.
*
* @param codes codes are added to this
* @param node a node to traverse
*/
protected static void findAndCodes(Set<String> codes, LogicExpression.Node node) {
if (node instanceof LogicExpression.Variable) {
codes.add(((LogicExpression.Variable) node).getName());
} else if (node instanceof LogicExpression.And) {
for (LogicExpression.Node child : ((LogicExpression.And) node).getChildren()) {
findAndCodes(codes, child);
}
}
}
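    // Example of findAndCodes (hypothetical logic): for the expression "A and (B or C) and D",
    // only codes reachable through AND nodes are collected, so the resulting set is {A, D};
    // B and C are skipped because they sit beneath an OR node.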
/**
* Make a SimpleConstraint for the given constraint. The Constraint will be
* case-insensitive. If the constraint value contains a wildcard and the operation is "=" or
* "<>" then the operation will be changed to "LIKE" or "NOT_LIKE" as appropriate.
*/
private static SimpleConstraint makeQueryStringConstraint(QueryField qf,
PathConstraintAttribute c) {
QueryEvaluable qe;
String value;
ConstraintOp op = c.getOp();
// Perform case insensitive matches, unless asked specifically not to.
if (ConstraintOp.EXACT_MATCH.equals(op) || ConstraintOp.STRICT_NOT_EQUALS.equals(op)) {
qe = qf;
value = c.getValue();
op = (ConstraintOp.EXACT_MATCH.equals(op))
? ConstraintOp.EQUALS: ConstraintOp.NOT_EQUALS;
} else {
qe = new QueryExpression(QueryExpression.LOWER, qf);
value = Util.wildcardUserToSql(c.getValue().toLowerCase());
}
// notes:
// - we always turn EQUALS into a MATCHES(LIKE) constraint and rely on Postgres
// to be sensible
        // - the value is quoted in a way suitable for a LIKE constraint, but not for a
        //   normal equals. for example 'Dpse\GA10108' needs to be 'Dpse\\GA10108' for equals
// but 'Dpse\\\\GA10108' (and hence "Dpse\\\\\\\\GA10108" as a Java string because
// backslash must be quoted with a backslash)
if (ConstraintOp.EQUALS.equals(op)) {
return new SimpleConstraint(qe, ConstraintOp.MATCHES, new QueryValue(value));
} else if (ConstraintOp.NOT_EQUALS.equals(op)) {
return new SimpleConstraint(qe, ConstraintOp.DOES_NOT_MATCH, new QueryValue(value));
} else if (ConstraintOp.CONTAINS.equals(op)) {
return new SimpleConstraint(qe, ConstraintOp.MATCHES,
new QueryValue("%" + value + "%"));
} else {
return new SimpleConstraint(qe, op, new QueryValue(value));
}
}
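    // Illustrative outcome of makeQueryStringConstraint (hypothetical path and value): an
    // equals constraint on "Gene.symbol" with the value "zen*" is lower-cased and its wildcard
    // converted, yielding roughly LOWER(Gene.symbol) LIKE 'zen%'; an EXACT_MATCH constraint
    // instead keeps the raw value and the unmodified field, so the comparison stays
    // case-sensitive.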
/**
* Make a SimpleConstraint for the given Date Constraint. The time stored in the Date will be
     * ignored. Example webapp constraints and the corresponding object store constraints:
* <table>
* <thead>
* <tr>
* <th>Webapp Version</th>
* <th>ObjectStore Version</th>
* </tr>
* </thead>
* <tbody>
     * <tr>
     * <td>
     * <code><= 2008-01-02</code>
     * </td>
     * <td>
     * <code>< 2008-01-03 00:00:00</code>
     * </td>
     * </tr>
     * <tr>
     * <td>
     * <code>< 2008-01-02</code>
     * </td>
     * <td>
     * <code>< 2008-01-02 00:00:00</code>
     * </td>
     * </tr>
     * <tr>
     * <td>
     * <code>> 2008-01-02</code>
     * </td>
     * <td>
     * <code>>= 2008-01-03 00:00:00</code>
     * </td>
     * </tr>
     * <tr>
     * <td>
     * <code>>= 2008-01-02</code>
     * </td>
     * <td>
     * <code>>= 2008-01-02 00:00:00</code>
     * </td>
     * </tr>
* </tbody>
* </table>
*
* @param qf the QueryNode in the new query
* @param c the webapp constraint
* @return a new object store constraint
*/
protected static Constraint makeQueryDateConstraint(QueryField qf, PathConstraintAttribute c) {
Date dateValue = (Date) TypeUtil.stringToObject(Date.class, c.getValue());
Calendar startOfDay = GregorianCalendar.getInstance(TimeZone.getTimeZone("GMT"));
startOfDay.setTime(dateValue);
startOfDay.set(Calendar.HOUR_OF_DAY, 0);
startOfDay.set(Calendar.MINUTE, 0);
startOfDay.set(Calendar.SECOND, 0);
startOfDay.set(Calendar.MILLISECOND, 0);
QueryValue startOfDayQV = new QueryValue(startOfDay.getTime());
Calendar endOfDay = (Calendar) startOfDay.clone();
endOfDay.add(Calendar.DATE, 1);
QueryValue endOfDayQV = new QueryValue(endOfDay.getTime());
if (ConstraintOp.EXACT_MATCH.equals(c.getOp()) || ConstraintOp.EQUALS.equals(c.getOp())) {
ConstraintSet cs = new ConstraintSet(ConstraintOp.AND);
cs.addConstraint(new SimpleConstraint(qf, ConstraintOp.GREATER_THAN_EQUALS,
startOfDayQV));
cs.addConstraint(new SimpleConstraint(qf, ConstraintOp.LESS_THAN, endOfDayQV));
return cs;
} else if (ConstraintOp.NOT_EQUALS.equals(c.getOp())) {
ConstraintSet cs = new ConstraintSet(ConstraintOp.OR);
cs.addConstraint(new SimpleConstraint(qf, ConstraintOp.LESS_THAN, startOfDayQV));
cs.addConstraint(new SimpleConstraint(qf, ConstraintOp.GREATER_THAN_EQUALS,
endOfDayQV));
return cs;
} else if (ConstraintOp.LESS_THAN_EQUALS.equals(c.getOp())) {
return new SimpleConstraint(qf, ConstraintOp.LESS_THAN, endOfDayQV);
} else if (ConstraintOp.LESS_THAN.equals(c.getOp())) {
return new SimpleConstraint(qf, ConstraintOp.LESS_THAN, startOfDayQV);
} else if (ConstraintOp.GREATER_THAN.equals(c.getOp())) {
return new SimpleConstraint(qf, ConstraintOp.GREATER_THAN_EQUALS, endOfDayQV);
} else if (ConstraintOp.GREATER_THAN_EQUALS.equals(c.getOp())) {
return new SimpleConstraint(qf, ConstraintOp.GREATER_THAN_EQUALS, startOfDayQV);
} else {
throw new RuntimeException("Unknown ConstraintOp: " + c);
}
}
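    // Example of the EQUALS case handled above: a webapp constraint of "= 2008-01-02" becomes
    // the pair of object store constraints
    //   field >= 2008-01-02 00:00:00 AND field < 2008-01-03 00:00:00
    // so that any time of day on the requested date matches.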
/**
* Given a LogicExpression, a Map from codes to Constraint objects, and a ConstraintSet to put
* it all in, construct a tree of ConstraintSets that reflects the expression.
*
* @param logic the LogicExpression object
* @param cs the ConstraintSet to put the constraints in
* @param codeToConstraint a Map from constraint code to Constraint object
*/
protected static void createConstraintStructure(LogicExpression logic, ConstraintSet cs,
Map<String, Constraint> codeToConstraint) {
if (logic != null) {
LogicExpression.Node node = logic.getRootNode();
createConstraintStructure(node, cs, codeToConstraint);
}
}
/**
* Given a LogicExpression.Node, a Map from codes to Constraint objects, and a ConstraintSet to
* put it all in, construct a tree of ConstraintSets that reflects the expression.
*
* @param node the LogicExpression.Node object
* @param cs the ConstraintSet to put the constraints in
* @param codeToConstraint a Map from constraint code to Constraint object
*/
protected static void createConstraintStructure(LogicExpression.Node node, ConstraintSet cs,
Map<String, Constraint> codeToConstraint) {
if (node instanceof LogicExpression.Variable) {
Constraint con = codeToConstraint.get(((LogicExpression.Variable) node).getName());
if (con != null) {
// If it is null, then it is probably a Loop constraint that participated in
// QueryClass collapsing.
cs.addConstraint(con);
}
} else {
LogicExpression.Operator op = (LogicExpression.Operator) node;
ConstraintSet set = null;
if (op instanceof LogicExpression.And) {
if (ConstraintOp.AND.equals(cs.getOp())) {
set = cs;
} else {
set = new ConstraintSet(ConstraintOp.AND);
}
} else {
if (ConstraintOp.OR.equals(cs.getOp())) {
set = cs;
} else {
set = new ConstraintSet(ConstraintOp.OR);
}
}
for (LogicExpression.Node child : op.getChildren()) {
createConstraintStructure(child, set, codeToConstraint);
}
if (set != cs) {
cs.addConstraint(set);
}
}
}
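    // Illustrative expansion by createConstraintStructure (hypothetical logic): for
    // "A and (B or C)" with a top-level AND ConstraintSet, constraint A is added directly to
    // that set, while B and C are wrapped in a nested OR ConstraintSet that is then added to
    // the parent. Codes whose Constraint is missing from the map (collapsed loop constraints)
    // are silently skipped.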
/**
* Add a constraint code to a logic expression, ANDed with any constraints already in the
* expression, e.g. 'A OR B' + code C -> '(A OR B) AND C'. If the expression is null a new
* expression is created.
* @param logic an existing constraint logic
* @param code the code to add
* @return a new logic expression including the new code
*/
protected static LogicExpression addToConstraintLogic(LogicExpression logic, String code) {
LogicExpression newLogic = logic;
if (logic == null) {
newLogic = new LogicExpression(code);
} else {
newLogic = new LogicExpression("(" + logic.toString() + ") AND " + code);
}
return newLogic;
}
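    // Example of addToConstraintLogic: starting from a null logic, adding code "A" yields "A";
    // adding "B" to that result gives an expression equivalent to "(A) AND B", and adding "C"
    // afterwards gives "((A) AND B) AND C".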
/**
* Remove a constraint code from a logic expression, e.g. '(A OR B) AND C' -> 'B AND C'. If
* there is only one code in the expression return null.
* @param logic an existing constraint logic
* @param code the code to remove
* @return a new logic expression or null if the expression is now empty
*/
protected static LogicExpression removeFromConstraintLogic(LogicExpression logic,
String code) {
if (logic != null) {
try {
logic.removeVariable(code);
} catch (IllegalArgumentException e) {
// an IllegalArgumentException is thrown if we try to remove the root node, this
// would make an empty expression so we can just set it to null
return null;
}
}
return logic;
}
/**
* Generate a query from a PathQuery, to summarise a particular column of results.
*
* @param pathQuery the PathQuery
* @param savedBags the current saved bags map
* @param pathToQueryNode Map, into which columns to display will be placed
* @param summaryPath a String path of the column to summarise
* @param os an ObjectStore to do LOOKUP queries in
* @param classKeys class key config
* @param bagQueryConfig a BagQueryConfig object
* @param pm the ProfileManager to fetch the superuser profile from
     * @param occurancesOnly Force the summary to take the form of an item summary if true.
* @return the generated summary query
* @throws ObjectStoreException if there is a problem creating the query
*/
public static Query makeSummaryQuery(
PathQuery pathQuery,
Map<String, InterMineBag> savedBags,
Map<String, QuerySelectable> pathToQueryNode,
String summaryPath,
ObjectStore os,
Map<String, List<FieldDescriptor>> classKeys,
BagQueryConfig bagQueryConfig,
ProfileManager pm,
boolean occurancesOnly) throws ObjectStoreException {
TemplateManager templateManager = new TemplateManager(pm.getSuperuserProfile(),
os.getModel());
BagQueryRunner bagQueryRunner = new BagQueryRunner(os, classKeys, bagQueryConfig,
templateManager);
return MainHelper.makeSummaryQuery(pathQuery, summaryPath, savedBags, pathToQueryNode,
bagQueryRunner, occurancesOnly);
}
/**
* Generate a query from a PathQuery, to summarise a particular column of results.
*
* @param pathQuery the PathQuery
* @param summaryPath a String path of the column to summarise
* @param savedBags the current saved bags map
* @param pathToQueryNode Map, into which columns to display will be placed
* @param bagQueryRunner a BagQueryRunner to execute bag queries
* @return the generated summary query
* @throws ObjectStoreException if there is a problem creating the query
*/
public static Query makeSummaryQuery(
PathQuery pathQuery,
String summaryPath,
Map<String, InterMineBag> savedBags,
Map<String, QuerySelectable> pathToQueryNode,
BagQueryRunner bagQueryRunner) throws ObjectStoreException {
return makeSummaryQuery(pathQuery, summaryPath, savedBags, pathToQueryNode,
bagQueryRunner, false);
}
/**
* Generate a query from a PathQuery, to summarise a particular column of results.
*
* @param pathQuery the PathQuery
* @param summaryPath a String path of the column to summarise
* @param savedBags the current saved bags map
* @param pathToQueryNode Map, into which columns to display will be placed
* @param bagQueryRunner a BagQueryRunner to execute bag queries
     * @param occurancesOnly Force the summary to take the form of an item summary if true.
* @return the generated summary query
* @throws ObjectStoreException if there is a problem creating the query
*/
public static Query makeSummaryQuery(
PathQuery pathQuery,
String summaryPath,
Map<String, InterMineBag> savedBags,
Map<String, QuerySelectable> pathToQueryNode,
BagQueryRunner bagQueryRunner,
boolean occurancesOnly) throws ObjectStoreException {
Map<String, QuerySelectable> origPathToQueryNode = new HashMap<String, QuerySelectable>();
        Query subQ = makeQuery(pathQuery, savedBags, origPathToQueryNode, bagQueryRunner, null);
subQ.clearOrderBy();
Map<String, QuerySelectable> newSelect = new LinkedHashMap<String, QuerySelectable>();
Set<QuerySelectable> oldSelect = new HashSet<QuerySelectable>();
for (QuerySelectable qs : subQ.getSelect()) {
oldSelect.add(qs);
if (qs instanceof QueryClass) {
newSelect.put(subQ.getAliases().get(qs), qs);
} else if (!(qs instanceof QueryPathExpression)) {
newSelect.put(subQ.getAliases().get(qs), qs);
}
}
subQ.clearSelect();
for (Map.Entry<String, QuerySelectable> selectEntry : newSelect.entrySet()) {
subQ.addToSelect(selectEntry.getValue(), selectEntry.getKey());
}
return recursiveMakeSummaryQuery(origPathToQueryNode, summaryPath, subQ, oldSelect,
pathToQueryNode, occurancesOnly);
}
private static Query recursiveMakeSummaryQuery(
Map<String, QuerySelectable>
origPathToQueryNode,
String summaryPath,
Query subQ, Set<QuerySelectable> oldSelect,
Map<String, QuerySelectable> pathToQueryNode,
boolean occurancesOnly) {
QueryField qf = (QueryField) origPathToQueryNode.get(summaryPath);
try {
if ((qf == null) || (!subQ.getFrom().contains(qf.getFromElement()))) {
// This column may be an outer join
String prefix = summaryPath.substring(0, summaryPath.lastIndexOf('.'));
String fieldName = summaryPath.substring(summaryPath.lastIndexOf('.') + 1);
QuerySelectable qs = origPathToQueryNode.get(prefix);
if (qs == null) {
throw new NullPointerException("Error - path " + summaryPath + " is not in map "
+ origPathToQueryNode);
} else if (qs instanceof QueryObjectPathExpression) {
QueryObjectPathExpression qope = (QueryObjectPathExpression) qs;
if ((!oldSelect.contains(qs))
&& (!oldSelect.contains(new PathExpressionField(qope, 0)))) {
throw new IllegalArgumentException("QueryObjectPathExpression is too deeply"
+ " nested");
}
// We need to add QueryClasses to the query for this outer join. This will make
// it an inner join, so the "no object" results will disappear.
QueryClass lastQc = qope.getDefaultClass();
qf = new QueryField(lastQc, fieldName);
subQ.addFrom(lastQc);
subQ.addToSelect(lastQc);
QueryClass rootQc = qope.getQueryClass();
QueryHelper.addAndConstraint(subQ, new ContainsConstraint(
new QueryObjectReference(rootQc, qope.getFieldName()),
ConstraintOp.CONTAINS, lastQc));
if (qope.getConstraint() != null) {
QueryHelper.addAndConstraint(subQ, qope.getConstraint());
}
} else if (qs instanceof QueryCollectionPathExpression) {
QueryCollectionPathExpression qcpe = (QueryCollectionPathExpression) qs;
//if (qcpe.getSelect().isEmpty() && qcpe.getFrom().isEmpty()
// && oldSelect.contains(qcpe)) {
if (oldSelect.contains(qcpe)) {
QueryClass firstQc = qcpe.getDefaultClass();
qf = new QueryField(firstQc, fieldName);
subQ.addFrom(firstQc);
subQ.addToSelect(firstQc);
QueryClass rootQc = qcpe.getQueryClass();
try {
QueryHelper.addAndConstraint(subQ, new ContainsConstraint(
new QueryCollectionReference(rootQc, qcpe.getFieldName()),
ConstraintOp.CONTAINS, firstQc));
} catch (IllegalArgumentException e) {
QueryHelper.addAndConstraint(subQ, new ContainsConstraint(
new QueryObjectReference(rootQc, qcpe.getFieldName()),
ConstraintOp.CONTAINS, firstQc));
}
for (FromElement extraQc : qcpe.getFrom()) {
if (extraQc instanceof QueryClass) {
subQ.addFrom(extraQc);
subQ.addToSelect((QueryClass) extraQc);
} else {
throw new IllegalArgumentException("FromElement is not a "
+ "QueryClass: " + extraQc);
}
}
if (qcpe.getConstraint() != null) {
QueryHelper.addAndConstraint(subQ, qcpe.getConstraint());
}
} else {
throw new IllegalArgumentException("QueryCollectionPathExpression is too"
+ " complicated to summarise");
}
} else {
throw new IllegalArgumentException("Error - path " + prefix + " resolves to"
+ " unknown object " + qs);
}
}
} catch (IllegalArgumentException e) {
for (QuerySelectable qs : oldSelect) {
try {
if ((qs instanceof PathExpressionField)
&& (((PathExpressionField) qs).getFieldNumber() == 0)) {
QueryObjectPathExpression qope = ((PathExpressionField) qs).getQope();
Query tempSubQ = QueryCloner.cloneQuery(subQ);
QueryClass lastQc = qope.getDefaultClass();
tempSubQ.addFrom(lastQc);
tempSubQ.addToSelect(lastQc);
QueryClass rootQc = qope.getQueryClass();
QueryHelper.addAndConstraint(tempSubQ, new ContainsConstraint(
new QueryObjectReference(rootQc, qope.getFieldName()),
ConstraintOp.CONTAINS, lastQc));
if (qope.getConstraint() != null) {
QueryHelper.addAndConstraint(tempSubQ, qope.getConstraint());
}
return recursiveMakeSummaryQuery(origPathToQueryNode, summaryPath, tempSubQ,
new HashSet<QuerySelectable>(qope.getSelect()), pathToQueryNode,
occurancesOnly);
} else if (qs instanceof QueryCollectionPathExpression) {
QueryCollectionPathExpression qcpe = (QueryCollectionPathExpression) qs;
QueryClass firstQc = qcpe.getDefaultClass();
Query tempSubQ = QueryCloner.cloneQuery(subQ);
tempSubQ.addFrom(firstQc);
tempSubQ.addToSelect(firstQc);
QueryClass rootQc = qcpe.getQueryClass();
try {
QueryHelper.addAndConstraint(tempSubQ, new ContainsConstraint(
new QueryCollectionReference(rootQc, qcpe.getFieldName()),
ConstraintOp.CONTAINS, firstQc));
} catch (IllegalArgumentException e2) {
QueryHelper.addAndConstraint(tempSubQ, new ContainsConstraint(
new QueryObjectReference(rootQc, qcpe.getFieldName()),
ConstraintOp.CONTAINS, firstQc));
}
for (FromElement extraQc : qcpe.getFrom()) {
if (extraQc instanceof QueryClass) {
tempSubQ.addFrom(extraQc);
tempSubQ.addToSelect((QueryClass) extraQc);
} else {
throw new IllegalArgumentException("FromElement is not a "
+ "QueryClass: " + extraQc);
}
}
if (qcpe.getConstraint() != null) {
QueryHelper.addAndConstraint(tempSubQ, qcpe.getConstraint());
}
return recursiveMakeSummaryQuery(origPathToQueryNode, summaryPath, tempSubQ,
new HashSet<QuerySelectable>(qcpe.getSelect()), pathToQueryNode,
occurancesOnly);
}
} catch (IllegalArgumentException e2) {
// Ignore it - we are searching for a working branch of the query
}
}
throw new IllegalArgumentException(
"Cannot find path (" + summaryPath + ") in query", e);
}
Query q = new Query();
q.addFrom(subQ);
subQ.addToSelect(qf);
qf = new QueryField(subQ, qf);
Class<?> summaryType = qf.getType();
QueryField origQf = (QueryField) origPathToQueryNode.get(summaryPath);
String fieldName = origQf.getFieldName();
String className = Util.getFriendlyName(((QueryClass) origQf.getFromElement())
.getType());
if (!occurancesOnly && isNumeric(summaryType)
&& (!SummaryConfig.summariseAsOccurrences(className + "." + fieldName))) {
return getHistogram(subQ, qf, pathToQueryNode);
} else if ((summaryType == String.class) || (summaryType == Boolean.class)
|| (summaryType == Long.class) || (summaryType == Integer.class)
|| (summaryType == Short.class) || (summaryType == Byte.class)
|| (summaryType == Float.class) || (summaryType == Double.class)
|| (summaryType == BigDecimal.class)) {
q.addToSelect(qf);
q.addToGroupBy(qf);
QueryNode count = new QueryFunction();
q.addToSelect(count);
pathToQueryNode.put(summaryPath, qf);
pathToQueryNode.put("Occurrences", count);
q.addToOrderBy(new OrderDescending(count));
} else {
// Probably Date
throw new IllegalArgumentException("Cannot summarise this column");
}
return q;
}
private static boolean isNumeric(Class<?> summaryType) {
return (summaryType == Long.class) || (summaryType == Integer.class)
|| (summaryType == Short.class) || (summaryType == Byte.class)
|| (summaryType == Float.class) || (summaryType == Double.class)
|| (summaryType == BigDecimal.class);
}
/**
* Produce a histogram query for a numerical column.
*
* In addition to the bucket number and the count for each bucket, each row also includes
* the general statistics previously supplied for backwards compatibility.
*
* BASIC IDEA:
* <pre>
     * select bq.min, bq.max, bq.bucket, sum(bq.c) as total from (
     *   select count(*) as c,
     *          vals.value as val,
     *          width_bucket(vals.value, stats.min, (stats.max * 1.01), 10) as bucket,
     *          stats.max as max,
     *          stats.min as min
* from (select v.value from values as v) as vals,
* (select max(v.value) as max, min(v.value) as min from values as v) as stats
* group by vals.value, stats.min, stats.max order by bucket, vals.value
* ) as bq
* group by bq.bucket, bq.max, bq.min
* order by bq.bucket;
* </pre>
*
     * @param source The source of the data.
* @param qf The field that contains the numerical information we are interested in.
* @param pathToQueryNode The map to update with names of columns.
* @return A query that when run will return a result set where each row has a bin number
* where 1 <= binNumber <= configuredMaxNoOfBins and a number of items in the data
* set that belong in the given bin.
*/
private static Query getHistogram(
Query source,
QueryField qf,
Map<String, QuerySelectable> pathToQueryNode) {
// Inner 1
Query vq = new Query();
vq.addFrom(source);
vq.addToSelect(qf);
vq.setDistinct(false);
// Inner 2
Query statsq = new Query();
statsq.addFrom(source);
QueryFunction min = new QueryFunction(qf, QueryFunction.MIN);
QueryFunction max = new QueryFunction(qf, QueryFunction.MAX);
QueryFunction avg = new QueryFunction(qf, QueryFunction.AVERAGE);
QueryFunction stddev = new QueryFunction(qf, QueryFunction.STDDEV);
QueryEvaluable bins = new QueryValue(SummaryConfig.getNumberOfBins());
Class<?> summaryType = qf.getType();
if (summaryType == Long.class || summaryType == Integer.class) {
bins = new QueryExpression(
bins, QueryExpression.LEAST,
new QueryExpression(max, QueryExpression.SUBTRACT, min)
);
}
statsq.addToSelect(min);
statsq.addToSelect(max);
statsq.addToSelect(avg);
statsq.addToSelect(stddev);
statsq.addToSelect(bins);
// Inner 3
Query bucketq = new Query();
bucketq.setDistinct(false);
QueryFunction count = new QueryFunction();
QueryField val = new QueryField(vq, qf);
QueryField maxval = new QueryField(statsq, max);
QueryField minval = new QueryField(statsq, min);
QueryField meanval = new QueryField(statsq, avg);
QueryField devval = new QueryField(statsq, stddev);
QueryExpression upperBound = new QueryExpression(
new QueryCast(maxval, BigDecimal.class),
QueryExpression.MULTIPLY,
new QueryCast(new QueryValue(new Double(1.01)), BigDecimal.class));
QueryField noOfBuckets = new QueryField(statsq, bins);
QueryFunction bucket = new WidthBucketFunction(val, minval, upperBound, noOfBuckets);
bucketq.addFrom(vq);
bucketq.addFrom(statsq);
bucketq.addToSelect(count);
bucketq.addToSelect(val);
bucketq.addToSelect(maxval);
bucketq.addToSelect(minval);
bucketq.addToSelect(meanval);
bucketq.addToSelect(devval);
bucketq.addToSelect(bucket);
bucketq.addToSelect(noOfBuckets);
bucketq.addToGroupBy(val);
bucketq.addToGroupBy(maxval);
bucketq.addToGroupBy(minval);
bucketq.addToGroupBy(meanval);
bucketq.addToGroupBy(devval);
bucketq.addToGroupBy(noOfBuckets);
bucketq.addToOrderBy(bucket);
bucketq.addToOrderBy(val);
// Outer
Query q = new Query();
QueryField bmax = new QueryField(bucketq, maxval);
QueryField bmin = new QueryField(bucketq, minval);
QueryField bmean = new QueryField(bucketq, meanval);
QueryField bdev = new QueryField(bucketq, devval);
QueryField bbucket = new QueryField(bucketq, bucket);
QueryFunction bucketTotal = new QueryFunction(
new QueryField(bucketq, count), QueryFunction.SUM);
QueryField buckets = new QueryField(bucketq, noOfBuckets);
q.addFrom(bucketq);
q.addToSelect(bmin);
q.addToSelect(bmax);
q.addToSelect(bmean);
q.addToSelect(bdev);
q.addToSelect(buckets);
q.addToSelect(bbucket);
q.addToSelect(bucketTotal);
q.addToGroupBy(bmin);
q.addToGroupBy(bmax);
q.addToGroupBy(bmean);
q.addToGroupBy(bdev);
q.addToGroupBy(bbucket);
q.addToGroupBy(buckets);
q.addToOrderBy(bbucket);
pathToQueryNode.put("Minimum", bmin);
pathToQueryNode.put("Maximum", bmax);
pathToQueryNode.put("Average", bmean);
pathToQueryNode.put("Standard Deviation", bdev);
pathToQueryNode.put("Buckets", bucketTotal);
pathToQueryNode.put("Bucket", bbucket);
pathToQueryNode.put("Occurances", bucketTotal);
return q;
}
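    // Each row of the histogram query built above carries, in select order: the overall
    // minimum, maximum, mean and standard deviation of the column, the number of buckets used,
    // the bucket number for that row, and the count of values falling into the bucket; the
    // summary statistics are repeated on every row for backwards compatibility.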
/**
* @param props properties to configure the range queries
*/
public static void loadHelpers(Properties props) {
RangeConfig.loadHelpers(props);
}
// Allow collections with stable orderings by class name.
private static final class ClassNameComparator implements Comparator<Class<?>>
{
@Override
public int compare(Class<?> o1, Class<?> o2) {
return o1.getName().compareTo(o2.getName());
}
}
/**
* @author Alex
*/
protected static final class RangeConfig
{
private RangeConfig() {
// Restricted constructor.
}
protected static Map<Class<?>, RangeHelper> rangeHelpers;
static {
init();
}
/**
* reset
*/
protected static void reset() {
init();
}
private static void init() {
rangeHelpers = new HashMap<Class<?>, RangeHelper>();
// Default basic helpers.
// rangeHelpers.put(int.class, new IntHelper());
// rangeHelpers.put(Integer.class, new IntHelper());
// rangeHelpers.put(String.class, new StringHelper());
loadHelpers(PropertiesUtil.getProperties());
}
/**
* @param allProps all properties
*/
@SuppressWarnings("unchecked")
protected static void loadHelpers(Properties allProps) {
Properties props = PropertiesUtil.getPropertiesStartingWith("pathquery.range.",
allProps);
for (String key: props.stringPropertyNames()) {
String[] parts = key.split("\\.", 3);
if (parts.length != 3) {
throw new IllegalStateException(
"Property names must be in the format "
+ "pathquery.range.${FullyQualifiedClassName}, got '" + key + "'"
);
}
String targetTypeName = parts[2];
Class<?> targetType;
try {
targetType = Class.forName(targetTypeName);
} catch (ClassNotFoundException e) {
throw new RuntimeException("Cannot find class named in config: '" + key
+ "'", e);
}
String helperName = props.getProperty(key);
Class<RangeHelper> helperType;
try {
helperType = (Class<RangeHelper>) Class.forName(helperName);
} catch (ClassNotFoundException e) {
throw new RuntimeException("Cannot find class named in congfig: '" + helperName
+ "'");
}
RangeHelper helper;
try {
helper = helperType.newInstance();
} catch (InstantiationException e) {
throw new RuntimeException("Could not instantiate range helper for '" + key
+ "'", e);
} catch (IllegalAccessException e) {
throw new RuntimeException("Could not instantiate range helper for '" + key
+ "'", e);
}
rangeHelpers.put(targetType, helper);
LOG.info("ADDED RANGE HELPER FOR " + targetType + " (" + helperType.getName()
+ ")");
}
}
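        // Example of the properties consumed by loadHelpers above (class names are
        // hypothetical):
        //   pathquery.range.com.example.model.Location = com.example.query.LocationRangeHelper
        // registers LocationRangeHelper as the RangeHelper used whenever a range constraint
        // is applied to a path of type com.example.model.Location.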
/**
* @param type class
* @return true if there is helper for this class of object
*/
public static boolean hasHelperForType(Class<?> type) {
return rangeHelpers.containsKey(type);
}
/**
* @param type type
* @return helper for given type
*/
public static RangeHelper getHelper(Class<?> type) {
return rangeHelpers.get(type);
}
}
/**
* @return set of classes that are legal to use with range constraints
*/
public static Set<Class<?>> getValidRangeTargets() {
return RangeConfig.rangeHelpers.keySet();
}
/**
* Controls access to configuration information on which fields should be summarised as a count
* of occurrences.
*
* @author Matthew Wakeling
*/
protected static final class SummaryConfig
{
private SummaryConfig() {
}
private static Set<String> config;
static {
config = new HashSet<String>();
String stringConfig = PropertiesUtil.getProperties()
.getProperty("querySummary.summariseAsOccurrences");
if (stringConfig != null) {
String[] stringConfigs = stringConfig.split(",");
for (String configEntry : stringConfigs) {
configEntry = configEntry.trim();
if (configEntry.contains(" ")) {
throw new IllegalArgumentException("querySummary.summariseAsOccurrences "
+ "property contains an entry with a space: \"" + configEntry
+ "\". Entries should be comma-separated.");
}
config.add(configEntry);
}
}
}
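        // Example configuration (hypothetical field names):
        //   querySummary.summariseAsOccurrences = Employee.age,Company.vatNumber
        // lists the "Class.field" entries that are summarised as counts of occurrences rather
        // than with a mean and standard deviation.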
/**
* Returns whether the given field name is configured to be summarised as a count of
* occurrences.
*
* @param fieldName a class name, a dot, and a field name
* @return true if the field should be summarised as a count of occurrences, false for
* a mean and standard deviation.
*/
public static boolean summariseAsOccurrences(String fieldName) {
return config.contains(fieldName);
}
/**
* Returns the number of bins to split a histogram into.
* @return The number of bins.
*/
public static Integer getNumberOfBins() {
return Integer.valueOf(
PropertiesUtil.getProperties().getProperty("querySummary.no-of-bins", "20"));
}
}
/**
     * @param q the Queryable the constraint will be used in
     * @param node the QueryNode representing the constrained field
     * @param con the range constraint
     * @return the generated range constraint
*/
public static Constraint makeRangeConstraint(
Queryable q,
QueryNode node,
PathConstraintRange con) {
Class<?> type = node.getType();
if (RangeConfig.hasHelperForType(type)) {
RangeHelper helper = RangeConfig.getHelper(type);
return helper.createConstraint(q, node, con);
}
throw new RuntimeException("No range constraints are possible for paths of type "
+ type.getName());
}
}
| support for DOES_NOT_CONTAIN in query xml
| intermine/api/main/src/org/intermine/api/query/MainHelper.java | support for DOES_NOT_CONTAIN in query xml | <ide><path>ntermine/api/main/src/org/intermine/api/query/MainHelper.java
<ide> } else if (ConstraintOp.CONTAINS.equals(op)) {
<ide> return new SimpleConstraint(qe, ConstraintOp.MATCHES,
<ide> new QueryValue("%" + value + "%"));
<add> } else if (ConstraintOp.DOES_NOT_CONTAIN.equals(op)) {
<add> return new SimpleConstraint(qe, ConstraintOp.DOES_NOT_MATCH,
<add> new QueryValue("%" + value + "%"));
<ide> } else {
<ide> return new SimpleConstraint(qe, op, new QueryValue(value));
<ide> } |
|
Java | apache-2.0 | cc0b679af9045fbd34be3b31eb8edfdf70a49488 | 0 | androidx/androidx,aosp-mirror/platform_frameworks_support,AndroidX/androidx,aosp-mirror/platform_frameworks_support,androidx/androidx,aosp-mirror/platform_frameworks_support,AndroidX/androidx,androidx/androidx,androidx/androidx,androidx/androidx,androidx/androidx,AndroidX/androidx,androidx/androidx,androidx/androidx,AndroidX/androidx,AndroidX/androidx,AndroidX/androidx,androidx/androidx,AndroidX/androidx,aosp-mirror/platform_frameworks_support,AndroidX/androidx,aosp-mirror/platform_frameworks_support,AndroidX/androidx,androidx/androidx,AndroidX/androidx | /*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.support.v4.app;
import android.content.Context;
import android.content.res.Configuration;
import android.content.res.TypedArray;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.os.Parcel;
import android.os.Parcelable;
import android.support.annotation.CallSuper;
import android.support.annotation.IdRes;
import android.support.annotation.StringRes;
import android.support.v4.util.DebugUtils;
import android.support.v4.util.LogWriter;
import android.support.v4.view.LayoutInflaterFactory;
import android.support.v4.view.ViewCompat;
import android.util.AttributeSet;
import android.util.Log;
import android.util.SparseArray;
import android.view.animation.AccelerateInterpolator;
import android.view.animation.AlphaAnimation;
import android.view.animation.Animation;
import android.view.animation.AnimationSet;
import android.view.animation.AnimationUtils;
import android.view.animation.DecelerateInterpolator;
import android.view.animation.Interpolator;
import android.view.animation.ScaleAnimation;
import android.view.animation.Animation.AnimationListener;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import java.io.FileDescriptor;
import java.io.PrintWriter;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* Static library support version of the framework's {@link android.app.FragmentManager}.
* Used to write apps that run on platforms prior to Android 3.0. When running
* on Android 3.0 or above, this implementation is still used; it does not try
 * to switch to the framework's implementation. See the framework
 * {@link android.app.FragmentManager} documentation for a class overview.
*
* <p>Your activity must derive from {@link FragmentActivity} to use this. From such an activity,
* you can acquire the {@link FragmentManager} by calling
* {@link FragmentActivity#getSupportFragmentManager}.
*/
public abstract class FragmentManager {
/**
* Representation of an entry on the fragment back stack, as created
* with {@link FragmentTransaction#addToBackStack(String)
* FragmentTransaction.addToBackStack()}. Entries can later be
* retrieved with {@link FragmentManager#getBackStackEntryAt(int)
* FragmentManager.getBackStackEntry()}.
*
* <p>Note that you should never hold on to a BackStackEntry object;
* the identifier as returned by {@link #getId} is the only thing that
* will be persisted across activity instances.
*/
public interface BackStackEntry {
/**
* Return the unique identifier for the entry. This is the only
* representation of the entry that will persist across activity
* instances.
*/
public int getId();
/**
* Get the name that was supplied to
* {@link FragmentTransaction#addToBackStack(String)
* FragmentTransaction.addToBackStack(String)} when creating this entry.
*/
public String getName();
/**
* Return the full bread crumb title resource identifier for the entry,
* or 0 if it does not have one.
*/
@StringRes
public int getBreadCrumbTitleRes();
/**
* Return the short bread crumb title resource identifier for the entry,
* or 0 if it does not have one.
*/
@StringRes
public int getBreadCrumbShortTitleRes();
/**
* Return the full bread crumb title for the entry, or null if it
* does not have one.
*/
public CharSequence getBreadCrumbTitle();
/**
* Return the short bread crumb title for the entry, or null if it
* does not have one.
*/
public CharSequence getBreadCrumbShortTitle();
}
/**
* Interface to watch for changes to the back stack.
*/
public interface OnBackStackChangedListener {
/**
* Called whenever the contents of the back stack change.
*/
public void onBackStackChanged();
}
/**
* Start a series of edit operations on the Fragments associated with
* this FragmentManager.
*
* <p>Note: A fragment transaction can only be created/committed prior
* to an activity saving its state. If you try to commit a transaction
* after {@link FragmentActivity#onSaveInstanceState FragmentActivity.onSaveInstanceState()}
* (and prior to a following {@link FragmentActivity#onStart FragmentActivity.onStart}
     * or {@link FragmentActivity#onResume FragmentActivity.onResume()}), you will get an error.
* This is because the framework takes care of saving your current fragments
* in the state, and if changes are made after the state is saved then they
* will be lost.</p>
*/
public abstract FragmentTransaction beginTransaction();
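    // Typical usage (illustrative; R.id.container and MyFragment are placeholders):
    //   getSupportFragmentManager().beginTransaction()
    //           .replace(R.id.container, new MyFragment())
    //           .addToBackStack("detail")
    //           .commit();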
/** @hide -- remove once prebuilts are in. */
@Deprecated
public FragmentTransaction openTransaction() {
return beginTransaction();
}
/**
* After a {@link FragmentTransaction} is committed with
* {@link FragmentTransaction#commit FragmentTransaction.commit()}, it
* is scheduled to be executed asynchronously on the process's main thread.
     * If you want to immediately execute any such pending operations, you
* can call this function (only from the main thread) to do so. Note that
* all callbacks and other related behavior will be done from within this
* call, so be careful about where this is called from.
*
* @return Returns true if there were any pending transactions to be
* executed.
*/
public abstract boolean executePendingTransactions();
/**
* Finds a fragment that was identified by the given id either when inflated
* from XML or as the container ID when added in a transaction. This first
* searches through fragments that are currently added to the manager's
* activity; if no such fragment is found, then all fragments currently
* on the back stack associated with this ID are searched.
* @return The fragment if found or null otherwise.
*/
public abstract Fragment findFragmentById(@IdRes int id);
/**
* Finds a fragment that was identified by the given tag either when inflated
* from XML or as supplied when added in a transaction. This first
* searches through fragments that are currently added to the manager's
* activity; if no such fragment is found, then all fragments currently
* on the back stack are searched.
* @return The fragment if found or null otherwise.
*/
public abstract Fragment findFragmentByTag(String tag);
/**
* Flag for {@link #popBackStack(String, int)}
* and {@link #popBackStack(int, int)}: If set, and the name or ID of
* a back stack entry has been supplied, then all matching entries will
* be consumed until one that doesn't match is found or the bottom of
* the stack is reached. Otherwise, all entries up to but not including that entry
* will be removed.
*/
public static final int POP_BACK_STACK_INCLUSIVE = 1<<0;
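    // For example, with an entry added under the name "detail" (illustrative):
    //   popBackStack("detail", 0)                         pops the entries above "detail"
    //   popBackStack("detail", POP_BACK_STACK_INCLUSIVE)  also pops the "detail" entry itself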
/**
* Pop the top state off the back stack. Returns true if there was one
* to pop, else false. This function is asynchronous -- it enqueues the
* request to pop, but the action will not be performed until the application
* returns to its event loop.
*/
public abstract void popBackStack();
/**
* Like {@link #popBackStack()}, but performs the operation immediately
* inside of the call. This is like calling {@link #executePendingTransactions()}
* afterwards.
* @return Returns true if there was something popped, else false.
*/
public abstract boolean popBackStackImmediate();
/**
* Pop the last fragment transition from the manager's fragment
* back stack. If there is nothing to pop, false is returned.
* This function is asynchronous -- it enqueues the
* request to pop, but the action will not be performed until the application
* returns to its event loop.
*
* @param name If non-null, this is the name of a previous back state
* to look for; if found, all states up to that state will be popped. The
* {@link #POP_BACK_STACK_INCLUSIVE} flag can be used to control whether
* the named state itself is popped. If null, only the top state is popped.
* @param flags Either 0 or {@link #POP_BACK_STACK_INCLUSIVE}.
*/
public abstract void popBackStack(String name, int flags);
/**
* Like {@link #popBackStack(String, int)}, but performs the operation immediately
* inside of the call. This is like calling {@link #executePendingTransactions()}
* afterwards.
* @return Returns true if there was something popped, else false.
*/
public abstract boolean popBackStackImmediate(String name, int flags);
/**
* Pop all back stack states up to the one with the given identifier.
* This function is asynchronous -- it enqueues the
* request to pop, but the action will not be performed until the application
* returns to its event loop.
*
     * @param id Identifier of the state to be popped. If no identifier exists,
* false is returned.
* The identifier is the number returned by
* {@link FragmentTransaction#commit() FragmentTransaction.commit()}. The
* {@link #POP_BACK_STACK_INCLUSIVE} flag can be used to control whether
* the named state itself is popped.
* @param flags Either 0 or {@link #POP_BACK_STACK_INCLUSIVE}.
*/
public abstract void popBackStack(int id, int flags);
/**
* Like {@link #popBackStack(int, int)}, but performs the operation immediately
* inside of the call. This is like calling {@link #executePendingTransactions()}
* afterwards.
* @return Returns true if there was something popped, else false.
*/
public abstract boolean popBackStackImmediate(int id, int flags);
/**
* Return the number of entries currently in the back stack.
*/
public abstract int getBackStackEntryCount();
/**
* Return the BackStackEntry at index <var>index</var> in the back stack;
     * entries start at index 0, which is the bottom of the stack.
*/
public abstract BackStackEntry getBackStackEntryAt(int index);
/**
* Add a new listener for changes to the fragment back stack.
*/
public abstract void addOnBackStackChangedListener(OnBackStackChangedListener listener);
/**
* Remove a listener that was previously added with
* {@link #addOnBackStackChangedListener(OnBackStackChangedListener)}.
*/
public abstract void removeOnBackStackChangedListener(OnBackStackChangedListener listener);
/**
* Put a reference to a fragment in a Bundle. This Bundle can be
* persisted as saved state, and when later restoring
* {@link #getFragment(Bundle, String)} will return the current
* instance of the same fragment.
*
* @param bundle The bundle in which to put the fragment reference.
* @param key The name of the entry in the bundle.
* @param fragment The Fragment whose reference is to be stored.
*/
public abstract void putFragment(Bundle bundle, String key, Fragment fragment);
/**
* Retrieve the current Fragment instance for a reference previously
* placed with {@link #putFragment(Bundle, String, Fragment)}.
*
* @param bundle The bundle from which to retrieve the fragment reference.
* @param key The name of the entry in the bundle.
* @return Returns the current Fragment instance that is associated with
* the given reference.
*/
public abstract Fragment getFragment(Bundle bundle, String key);
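    // Illustrative pairing of putFragment/getFragment (field and key names are placeholders):
    //   // while saving state:
    //   getSupportFragmentManager().putFragment(outState, "currentFragment", mCurrent);
    //   // while restoring:
    //   mCurrent = getSupportFragmentManager().getFragment(savedInstanceState, "currentFragment");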
/**
* Get a list of all fragments that have been added to the fragment manager.
*
* @return The list of all fragments or null if none.
* @hide
*/
public abstract List<Fragment> getFragments();
/**
* Save the current instance state of the given Fragment. This can be
* used later when creating a new instance of the Fragment and adding
* it to the fragment manager, to have it create itself to match the
* current state returned here. Note that there are limits on how
* this can be used:
*
* <ul>
* <li>The Fragment must currently be attached to the FragmentManager.
* <li>A new Fragment created using this saved state must be the same class
* type as the Fragment it was created from.
* <li>The saved state can not contain dependencies on other fragments --
* that is it can't use {@link #putFragment(Bundle, String, Fragment)} to
* store a fragment reference because that reference may not be valid when
* this saved state is later used. Likewise the Fragment's target and
* result code are not included in this state.
* </ul>
*
* @param f The Fragment whose state is to be saved.
* @return The generated state. This will be null if there was no
* interesting state created by the fragment.
*/
public abstract Fragment.SavedState saveFragmentInstanceState(Fragment f);
/**
* Returns true if the final {@link android.app.Activity#onDestroy() Activity.onDestroy()}
* call has been made on the FragmentManager's Activity, so this instance is now dead.
*/
public abstract boolean isDestroyed();
/**
* Print the FragmentManager's state into the given stream.
*
* @param prefix Text to print at the front of each line.
* @param fd The raw file descriptor that the dump is being sent to.
* @param writer A PrintWriter to which the dump is to be set.
* @param args Additional arguments to the dump request.
*/
public abstract void dump(String prefix, FileDescriptor fd, PrintWriter writer, String[] args);
/**
* Control whether the framework's internal fragment manager debugging
* logs are turned on. If enabled, you will see output in logcat as
* the framework performs fragment operations.
*/
public static void enableDebugLogging(boolean enabled) {
FragmentManagerImpl.DEBUG = enabled;
}
}
final class FragmentManagerState implements Parcelable {
FragmentState[] mActive;
int[] mAdded;
BackStackState[] mBackStack;
public FragmentManagerState() {
}
public FragmentManagerState(Parcel in) {
mActive = in.createTypedArray(FragmentState.CREATOR);
mAdded = in.createIntArray();
mBackStack = in.createTypedArray(BackStackState.CREATOR);
}
public int describeContents() {
return 0;
}
public void writeToParcel(Parcel dest, int flags) {
dest.writeTypedArray(mActive, flags);
dest.writeIntArray(mAdded);
dest.writeTypedArray(mBackStack, flags);
}
public static final Parcelable.Creator<FragmentManagerState> CREATOR
= new Parcelable.Creator<FragmentManagerState>() {
public FragmentManagerState createFromParcel(Parcel in) {
return new FragmentManagerState(in);
}
public FragmentManagerState[] newArray(int size) {
return new FragmentManagerState[size];
}
};
}
/**
* Container for fragments associated with an activity.
*/
final class FragmentManagerImpl extends FragmentManager implements LayoutInflaterFactory {
static boolean DEBUG = false;
static final String TAG = "FragmentManager";
static final boolean HONEYCOMB = android.os.Build.VERSION.SDK_INT >= 11;
static final String TARGET_REQUEST_CODE_STATE_TAG = "android:target_req_state";
static final String TARGET_STATE_TAG = "android:target_state";
static final String VIEW_STATE_TAG = "android:view_state";
static final String USER_VISIBLE_HINT_TAG = "android:user_visible_hint";
static class AnimateOnHWLayerIfNeededListener implements AnimationListener {
private AnimationListener mOrignalListener = null;
private boolean mShouldRunOnHWLayer = false;
private View mView = null;
public AnimateOnHWLayerIfNeededListener(final View v, Animation anim) {
if (v == null || anim == null) {
return;
}
mView = v;
}
public AnimateOnHWLayerIfNeededListener(final View v, Animation anim,
AnimationListener listener) {
if (v == null || anim == null) {
return;
}
mOrignalListener = listener;
mView = v;
}
@Override
@CallSuper
public void onAnimationStart(Animation animation) {
if (mView != null) {
mShouldRunOnHWLayer = shouldRunOnHWLayer(mView, animation);
if (mShouldRunOnHWLayer) {
mView.post(new Runnable() {
@Override
public void run() {
ViewCompat.setLayerType(mView, ViewCompat.LAYER_TYPE_HARDWARE, null);
}
});
}
}
if (mOrignalListener != null) {
mOrignalListener.onAnimationStart(animation);
}
}
@Override
@CallSuper
public void onAnimationEnd(Animation animation) {
if (mView != null && mShouldRunOnHWLayer) {
mView.post(new Runnable() {
@Override
public void run() {
ViewCompat.setLayerType(mView, ViewCompat.LAYER_TYPE_NONE, null);
}
});
}
if (mOrignalListener != null) {
mOrignalListener.onAnimationEnd(animation);
}
}
@Override
public void onAnimationRepeat(Animation animation) {
if (mOrignalListener != null) {
mOrignalListener.onAnimationRepeat(animation);
}
}
}
ArrayList<Runnable> mPendingActions;
Runnable[] mTmpActions;
boolean mExecutingActions;
ArrayList<Fragment> mActive;
ArrayList<Fragment> mAdded;
ArrayList<Integer> mAvailIndices;
ArrayList<BackStackRecord> mBackStack;
ArrayList<Fragment> mCreatedMenus;
// Must be accessed while locked.
ArrayList<BackStackRecord> mBackStackIndices;
ArrayList<Integer> mAvailBackStackIndices;
ArrayList<OnBackStackChangedListener> mBackStackChangeListeners;
int mCurState = Fragment.INITIALIZING;
FragmentHostCallback mHost;
FragmentController mController;
FragmentContainer mContainer;
Fragment mParent;
static Field sAnimationListenerField = null;
boolean mNeedMenuInvalidate;
boolean mStateSaved;
boolean mDestroyed;
String mNoTransactionsBecause;
boolean mHavePendingDeferredStart;
// Temporary vars for state save and restore.
Bundle mStateBundle = null;
SparseArray<Parcelable> mStateArray = null;
Runnable mExecCommit = new Runnable() {
@Override
public void run() {
execPendingActions();
}
};
static boolean modifiesAlpha(Animation anim) {
if (anim instanceof AlphaAnimation) {
return true;
} else if (anim instanceof AnimationSet) {
List<Animation> anims = ((AnimationSet) anim).getAnimations();
for (int i = 0; i < anims.size(); i++) {
if (anims.get(i) instanceof AlphaAnimation) {
return true;
}
}
}
return false;
}
static boolean shouldRunOnHWLayer(View v, Animation anim) {
return ViewCompat.getLayerType(v) == ViewCompat.LAYER_TYPE_NONE
&& ViewCompat.hasOverlappingRendering(v)
&& modifiesAlpha(anim);
}
private void throwException(RuntimeException ex) {
Log.e(TAG, ex.getMessage());
Log.e(TAG, "Activity state:");
LogWriter logw = new LogWriter(TAG);
PrintWriter pw = new PrintWriter(logw);
if (mHost != null) {
try {
mHost.onDump(" ", null, pw, new String[] { });
} catch (Exception e) {
Log.e(TAG, "Failed dumping state", e);
}
} else {
try {
dump(" ", null, pw, new String[] { });
} catch (Exception e) {
Log.e(TAG, "Failed dumping state", e);
}
}
throw ex;
}
@Override
public FragmentTransaction beginTransaction() {
return new BackStackRecord(this);
}
@Override
public boolean executePendingTransactions() {
return execPendingActions();
}
@Override
public void popBackStack() {
enqueueAction(new Runnable() {
@Override public void run() {
popBackStackState(mHost.getHandler(), null, -1, 0);
}
}, false);
}
@Override
public boolean popBackStackImmediate() {
checkStateLoss();
executePendingTransactions();
return popBackStackState(mHost.getHandler(), null, -1, 0);
}
@Override
public void popBackStack(final String name, final int flags) {
enqueueAction(new Runnable() {
@Override public void run() {
popBackStackState(mHost.getHandler(), name, -1, flags);
}
}, false);
}
@Override
public boolean popBackStackImmediate(String name, int flags) {
checkStateLoss();
executePendingTransactions();
return popBackStackState(mHost.getHandler(), name, -1, flags);
}
@Override
public void popBackStack(final int id, final int flags) {
if (id < 0) {
throw new IllegalArgumentException("Bad id: " + id);
}
enqueueAction(new Runnable() {
@Override public void run() {
popBackStackState(mHost.getHandler(), null, id, flags);
}
}, false);
}
@Override
public boolean popBackStackImmediate(int id, int flags) {
checkStateLoss();
executePendingTransactions();
if (id < 0) {
throw new IllegalArgumentException("Bad id: " + id);
}
return popBackStackState(mHost.getHandler(), null, id, flags);
}
@Override
public int getBackStackEntryCount() {
return mBackStack != null ? mBackStack.size() : 0;
}
@Override
public BackStackEntry getBackStackEntryAt(int index) {
return mBackStack.get(index);
}
@Override
public void addOnBackStackChangedListener(OnBackStackChangedListener listener) {
if (mBackStackChangeListeners == null) {
mBackStackChangeListeners = new ArrayList<OnBackStackChangedListener>();
}
mBackStackChangeListeners.add(listener);
}
@Override
public void removeOnBackStackChangedListener(OnBackStackChangedListener listener) {
if (mBackStackChangeListeners != null) {
mBackStackChangeListeners.remove(listener);
}
}
@Override
public void putFragment(Bundle bundle, String key, Fragment fragment) {
if (fragment.mIndex < 0) {
throwException(new IllegalStateException("Fragment " + fragment
+ " is not currently in the FragmentManager"));
}
bundle.putInt(key, fragment.mIndex);
}
@Override
public Fragment getFragment(Bundle bundle, String key) {
int index = bundle.getInt(key, -1);
if (index == -1) {
return null;
}
if (index >= mActive.size()) {
throwException(new IllegalStateException("Fragment no longer exists for key "
+ key + ": index " + index));
}
Fragment f = mActive.get(index);
if (f == null) {
throwException(new IllegalStateException("Fragment no longer exists for key "
+ key + ": index " + index));
}
return f;
}
@Override
public List<Fragment> getFragments() {
return mActive;
}
@Override
public Fragment.SavedState saveFragmentInstanceState(Fragment fragment) {
if (fragment.mIndex < 0) {
throwException( new IllegalStateException("Fragment " + fragment
+ " is not currently in the FragmentManager"));
}
if (fragment.mState > Fragment.INITIALIZING) {
Bundle result = saveFragmentBasicState(fragment);
return result != null ? new Fragment.SavedState(result) : null;
}
return null;
}
@Override
public boolean isDestroyed() {
return mDestroyed;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder(128);
sb.append("FragmentManager{");
sb.append(Integer.toHexString(System.identityHashCode(this)));
sb.append(" in ");
if (mParent != null) {
DebugUtils.buildShortClassTag(mParent, sb);
} else {
DebugUtils.buildShortClassTag(mHost, sb);
}
sb.append("}}");
return sb.toString();
}
@Override
public void dump(String prefix, FileDescriptor fd, PrintWriter writer, String[] args) {
String innerPrefix = prefix + " ";
int N;
if (mActive != null) {
N = mActive.size();
if (N > 0) {
writer.print(prefix); writer.print("Active Fragments in ");
writer.print(Integer.toHexString(System.identityHashCode(this)));
writer.println(":");
for (int i=0; i<N; i++) {
Fragment f = mActive.get(i);
writer.print(prefix); writer.print(" #"); writer.print(i);
writer.print(": "); writer.println(f);
if (f != null) {
f.dump(innerPrefix, fd, writer, args);
}
}
}
}
if (mAdded != null) {
N = mAdded.size();
if (N > 0) {
writer.print(prefix); writer.println("Added Fragments:");
for (int i=0; i<N; i++) {
Fragment f = mAdded.get(i);
writer.print(prefix); writer.print(" #"); writer.print(i);
writer.print(": "); writer.println(f.toString());
}
}
}
if (mCreatedMenus != null) {
N = mCreatedMenus.size();
if (N > 0) {
writer.print(prefix); writer.println("Fragments Created Menus:");
for (int i=0; i<N; i++) {
Fragment f = mCreatedMenus.get(i);
writer.print(prefix); writer.print(" #"); writer.print(i);
writer.print(": "); writer.println(f.toString());
}
}
}
if (mBackStack != null) {
N = mBackStack.size();
if (N > 0) {
writer.print(prefix); writer.println("Back Stack:");
for (int i=0; i<N; i++) {
BackStackRecord bs = mBackStack.get(i);
writer.print(prefix); writer.print(" #"); writer.print(i);
writer.print(": "); writer.println(bs.toString());
bs.dump(innerPrefix, fd, writer, args);
}
}
}
synchronized (this) {
if (mBackStackIndices != null) {
N = mBackStackIndices.size();
if (N > 0) {
writer.print(prefix); writer.println("Back Stack Indices:");
for (int i=0; i<N; i++) {
BackStackRecord bs = mBackStackIndices.get(i);
writer.print(prefix); writer.print(" #"); writer.print(i);
writer.print(": "); writer.println(bs);
}
}
}
if (mAvailBackStackIndices != null && mAvailBackStackIndices.size() > 0) {
writer.print(prefix); writer.print("mAvailBackStackIndices: ");
writer.println(Arrays.toString(mAvailBackStackIndices.toArray()));
}
}
if (mPendingActions != null) {
N = mPendingActions.size();
if (N > 0) {
writer.print(prefix); writer.println("Pending Actions:");
for (int i=0; i<N; i++) {
Runnable r = mPendingActions.get(i);
writer.print(prefix); writer.print(" #"); writer.print(i);
writer.print(": "); writer.println(r);
}
}
}
writer.print(prefix); writer.println("FragmentManager misc state:");
writer.print(prefix); writer.print(" mHost="); writer.println(mHost);
writer.print(prefix); writer.print(" mContainer="); writer.println(mContainer);
if (mParent != null) {
writer.print(prefix); writer.print(" mParent="); writer.println(mParent);
}
writer.print(prefix); writer.print(" mCurState="); writer.print(mCurState);
writer.print(" mStateSaved="); writer.print(mStateSaved);
writer.print(" mDestroyed="); writer.println(mDestroyed);
if (mNeedMenuInvalidate) {
writer.print(prefix); writer.print(" mNeedMenuInvalidate=");
writer.println(mNeedMenuInvalidate);
}
if (mNoTransactionsBecause != null) {
writer.print(prefix); writer.print(" mNoTransactionsBecause=");
writer.println(mNoTransactionsBecause);
}
if (mAvailIndices != null && mAvailIndices.size() > 0) {
writer.print(prefix); writer.print(" mAvailIndices: ");
writer.println(Arrays.toString(mAvailIndices.toArray()));
}
}
static final Interpolator DECELERATE_QUINT = new DecelerateInterpolator(2.5f);
static final Interpolator DECELERATE_CUBIC = new DecelerateInterpolator(1.5f);
static final Interpolator ACCELERATE_QUINT = new AccelerateInterpolator(2.5f);
static final Interpolator ACCELERATE_CUBIC = new AccelerateInterpolator(1.5f);
static final int ANIM_DUR = 220;
static Animation makeOpenCloseAnimation(Context context, float startScale,
float endScale, float startAlpha, float endAlpha) {
AnimationSet set = new AnimationSet(false);
ScaleAnimation scale = new ScaleAnimation(startScale, endScale, startScale, endScale,
Animation.RELATIVE_TO_SELF, .5f, Animation.RELATIVE_TO_SELF, .5f);
scale.setInterpolator(DECELERATE_QUINT);
scale.setDuration(ANIM_DUR);
set.addAnimation(scale);
AlphaAnimation alpha = new AlphaAnimation(startAlpha, endAlpha);
alpha.setInterpolator(DECELERATE_CUBIC);
alpha.setDuration(ANIM_DUR);
set.addAnimation(alpha);
return set;
}
static Animation makeFadeAnimation(Context context, float start, float end) {
AlphaAnimation anim = new AlphaAnimation(start, end);
anim.setInterpolator(DECELERATE_CUBIC);
anim.setDuration(ANIM_DUR);
return anim;
}
Animation loadAnimation(Fragment fragment, int transit, boolean enter,
int transitionStyle) {
Animation animObj = fragment.onCreateAnimation(transit, enter,
fragment.mNextAnim);
if (animObj != null) {
return animObj;
}
if (fragment.mNextAnim != 0) {
Animation anim = AnimationUtils.loadAnimation(mHost.getContext(), fragment.mNextAnim);
if (anim != null) {
return anim;
}
}
if (transit == 0) {
return null;
}
int styleIndex = transitToStyleIndex(transit, enter);
if (styleIndex < 0) {
return null;
}
switch (styleIndex) {
case ANIM_STYLE_OPEN_ENTER:
return makeOpenCloseAnimation(mHost.getContext(), 1.125f, 1.0f, 0, 1);
case ANIM_STYLE_OPEN_EXIT:
return makeOpenCloseAnimation(mHost.getContext(), 1.0f, .975f, 1, 0);
case ANIM_STYLE_CLOSE_ENTER:
return makeOpenCloseAnimation(mHost.getContext(), .975f, 1.0f, 0, 1);
case ANIM_STYLE_CLOSE_EXIT:
return makeOpenCloseAnimation(mHost.getContext(), 1.0f, 1.075f, 1, 0);
case ANIM_STYLE_FADE_ENTER:
return makeFadeAnimation(mHost.getContext(), 0, 1);
case ANIM_STYLE_FADE_EXIT:
return makeFadeAnimation(mHost.getContext(), 1, 0);
}
if (transitionStyle == 0 && mHost.onHasWindowAnimations()) {
transitionStyle = mHost.onGetWindowAnimations();
}
if (transitionStyle == 0) {
return null;
}
//TypedArray attrs = mActivity.obtainStyledAttributes(transitionStyle,
// com.android.internal.R.styleable.FragmentAnimation);
//int anim = attrs.getResourceId(styleIndex, 0);
//attrs.recycle();
//if (anim == 0) {
// return null;
//}
//return AnimatorInflater.loadAnimator(mActivity, anim);
return null;
}
public void performPendingDeferredStart(Fragment f) {
if (f.mDeferStart) {
if (mExecutingActions) {
// Wait until we're done executing our pending transactions
mHavePendingDeferredStart = true;
return;
}
f.mDeferStart = false;
moveToState(f, mCurState, 0, 0, false);
}
}
    /**
     * Sets the view to be animated on a hardware layer during the animation. Note
     * that calling this will replace any existing animation listener on the animation
     * with a new one, as animations do not support more than one listener. Therefore,
     * animations that already have listeners should do the layer change operations
     * in their existing listeners, rather than calling this function.
     */
private void setHWLayerAnimListenerIfAlpha(final View v, Animation anim) {
if (v == null || anim == null) {
return;
}
if (shouldRunOnHWLayer(v, anim)) {
AnimationListener originalListener = null;
try {
if (sAnimationListenerField == null) {
sAnimationListenerField = Animation.class.getDeclaredField("mListener");
sAnimationListenerField.setAccessible(true);
}
originalListener = (AnimationListener) sAnimationListenerField.get(anim);
} catch (NoSuchFieldException e) {
Log.e(TAG, "No field with the name mListener is found in Animation class", e);
} catch (IllegalAccessException e) {
Log.e(TAG, "Cannot access Animation's mListener field", e);
}
            // If there's already a listener set on the animation, we need to wrap the new
            // listener around the existing listener, so that both of them receive the
            // animation listener callbacks.
anim.setAnimationListener(new AnimateOnHWLayerIfNeededListener(v, anim,
originalListener));
}
}
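    // Illustrative sketch (not part of this class): per the javadoc above, callers that
    // already install their own AnimationListener should toggle the hardware layer in that
    // listener instead of calling setHWLayerAnimListenerIfAlpha(), roughly like this
    // ("view" and the listener body are assumptions):
    //
    //   anim.setAnimationListener(new Animation.AnimationListener() {
    //       @Override public void onAnimationStart(Animation a) {
    //           ViewCompat.setLayerType(view, ViewCompat.LAYER_TYPE_HARDWARE, null);
    //       }
    //       @Override public void onAnimationEnd(Animation a) {
    //           ViewCompat.setLayerType(view, ViewCompat.LAYER_TYPE_NONE, null);
    //       }
    //       @Override public void onAnimationRepeat(Animation a) { }
    //   });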
void moveToState(Fragment f, int newState, int transit, int transitionStyle,
boolean keepActive) {
// Fragments that are not currently added will sit in the onCreate() state.
if ((!f.mAdded || f.mDetached) && newState > Fragment.CREATED) {
newState = Fragment.CREATED;
}
if (f.mRemoving && newState > f.mState) {
// While removing a fragment, we can't change it to a higher state.
newState = f.mState;
}
// Defer start if requested; don't allow it to move to STARTED or higher
// if it's not already started.
if (f.mDeferStart && f.mState < Fragment.STARTED && newState > Fragment.STOPPED) {
newState = Fragment.STOPPED;
}
if (f.mState < newState) {
// For fragments that are created from a layout, when restoring from
// state we don't want to allow them to be created until they are
// being reloaded from the layout.
if (f.mFromLayout && !f.mInLayout) {
return;
}
if (f.mAnimatingAway != null) {
// The fragment is currently being animated... but! Now we
// want to move our state back up. Give up on waiting for the
// animation, move to whatever the final state should be once
// the animation is done, and then we can proceed from there.
f.mAnimatingAway = null;
moveToState(f, f.mStateAfterAnimating, 0, 0, true);
}
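            // Note: the cases below intentionally fall through, so the fragment is walked
            // up through every intermediate state until it reaches newState.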
switch (f.mState) {
case Fragment.INITIALIZING:
if (DEBUG) Log.v(TAG, "moveto CREATED: " + f);
if (f.mSavedFragmentState != null) {
f.mSavedFragmentState.setClassLoader(mHost.getContext().getClassLoader());
f.mSavedViewState = f.mSavedFragmentState.getSparseParcelableArray(
FragmentManagerImpl.VIEW_STATE_TAG);
f.mTarget = getFragment(f.mSavedFragmentState,
FragmentManagerImpl.TARGET_STATE_TAG);
if (f.mTarget != null) {
f.mTargetRequestCode = f.mSavedFragmentState.getInt(
FragmentManagerImpl.TARGET_REQUEST_CODE_STATE_TAG, 0);
}
f.mUserVisibleHint = f.mSavedFragmentState.getBoolean(
FragmentManagerImpl.USER_VISIBLE_HINT_TAG, true);
if (!f.mUserVisibleHint) {
f.mDeferStart = true;
if (newState > Fragment.STOPPED) {
newState = Fragment.STOPPED;
}
}
}
f.mHost = mHost;
f.mParentFragment = mParent;
f.mFragmentManager = mParent != null
? mParent.mChildFragmentManager : mHost.getFragmentManagerImpl();
f.mCalled = false;
f.onAttach(mHost.getContext());
if (!f.mCalled) {
throw new SuperNotCalledException("Fragment " + f
+ " did not call through to super.onAttach()");
}
if (f.mParentFragment == null) {
mHost.onAttachFragment(f);
}
if (!f.mRetaining) {
f.performCreate(f.mSavedFragmentState);
}
f.mRetaining = false;
if (f.mFromLayout) {
// For fragments that are part of the content view
// layout, we need to instantiate the view immediately
// and the inflater will take care of adding it.
f.mView = f.performCreateView(f.getLayoutInflater(
f.mSavedFragmentState), null, f.mSavedFragmentState);
if (f.mView != null) {
f.mInnerView = f.mView;
if (Build.VERSION.SDK_INT >= 11) {
ViewCompat.setSaveFromParentEnabled(f.mView, false);
} else {
f.mView = NoSaveStateFrameLayout.wrap(f.mView);
}
if (f.mHidden) f.mView.setVisibility(View.GONE);
f.onViewCreated(f.mView, f.mSavedFragmentState);
} else {
f.mInnerView = null;
}
}
case Fragment.CREATED:
if (newState > Fragment.CREATED) {
if (DEBUG) Log.v(TAG, "moveto ACTIVITY_CREATED: " + f);
if (!f.mFromLayout) {
ViewGroup container = null;
if (f.mContainerId != 0) {
container = (ViewGroup)mContainer.onFindViewById(f.mContainerId);
if (container == null && !f.mRestored) {
throwException(new IllegalArgumentException(
"No view found for id 0x"
+ Integer.toHexString(f.mContainerId) + " ("
+ f.getResources().getResourceName(f.mContainerId)
+ ") for fragment " + f));
}
}
f.mContainer = container;
f.mView = f.performCreateView(f.getLayoutInflater(
f.mSavedFragmentState), container, f.mSavedFragmentState);
if (f.mView != null) {
f.mInnerView = f.mView;
if (Build.VERSION.SDK_INT >= 11) {
ViewCompat.setSaveFromParentEnabled(f.mView, false);
} else {
f.mView = NoSaveStateFrameLayout.wrap(f.mView);
}
if (container != null) {
Animation anim = loadAnimation(f, transit, true,
transitionStyle);
if (anim != null) {
setHWLayerAnimListenerIfAlpha(f.mView, anim);
f.mView.startAnimation(anim);
}
container.addView(f.mView);
}
if (f.mHidden) f.mView.setVisibility(View.GONE);
f.onViewCreated(f.mView, f.mSavedFragmentState);
} else {
f.mInnerView = null;
}
}
f.performActivityCreated(f.mSavedFragmentState);
if (f.mView != null) {
f.restoreViewState(f.mSavedFragmentState);
}
f.mSavedFragmentState = null;
}
case Fragment.ACTIVITY_CREATED:
case Fragment.STOPPED:
if (newState > Fragment.STOPPED) {
if (DEBUG) Log.v(TAG, "moveto STARTED: " + f);
f.performStart();
}
case Fragment.STARTED:
if (newState > Fragment.STARTED) {
if (DEBUG) Log.v(TAG, "moveto RESUMED: " + f);
f.mResumed = true;
f.performResume();
f.mSavedFragmentState = null;
f.mSavedViewState = null;
}
}
} else if (f.mState > newState) {
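            // As above, the cases intentionally fall through, walking the fragment down
            // through each intermediate state in turn.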
switch (f.mState) {
case Fragment.RESUMED:
if (newState < Fragment.RESUMED) {
if (DEBUG) Log.v(TAG, "movefrom RESUMED: " + f);
f.performPause();
f.mResumed = false;
}
case Fragment.STARTED:
if (newState < Fragment.STARTED) {
if (DEBUG) Log.v(TAG, "movefrom STARTED: " + f);
f.performStop();
}
case Fragment.STOPPED:
if (newState < Fragment.STOPPED) {
if (DEBUG) Log.v(TAG, "movefrom STOPPED: " + f);
f.performReallyStop();
}
case Fragment.ACTIVITY_CREATED:
if (newState < Fragment.ACTIVITY_CREATED) {
if (DEBUG) Log.v(TAG, "movefrom ACTIVITY_CREATED: " + f);
if (f.mView != null) {
// Need to save the current view state if not
// done already.
if (mHost.onShouldSaveFragmentState(f) && f.mSavedViewState == null) {
saveFragmentViewState(f);
}
}
f.performDestroyView();
if (f.mView != null && f.mContainer != null) {
Animation anim = null;
if (mCurState > Fragment.INITIALIZING && !mDestroyed) {
anim = loadAnimation(f, transit, false,
transitionStyle);
}
if (anim != null) {
final Fragment fragment = f;
f.mAnimatingAway = f.mView;
f.mStateAfterAnimating = newState;
final View viewToAnimate = f.mView;
anim.setAnimationListener(new AnimateOnHWLayerIfNeededListener(
viewToAnimate, anim) {
@Override
public void onAnimationEnd(Animation animation) {
super.onAnimationEnd(animation);
if (fragment.mAnimatingAway != null) {
fragment.mAnimatingAway = null;
moveToState(fragment, fragment.mStateAfterAnimating,
0, 0, false);
}
}
});
f.mView.startAnimation(anim);
}
f.mContainer.removeView(f.mView);
}
f.mContainer = null;
f.mView = null;
f.mInnerView = null;
}
case Fragment.CREATED:
if (newState < Fragment.CREATED) {
if (mDestroyed) {
if (f.mAnimatingAway != null) {
// The fragment's containing activity is
// being destroyed, but this fragment is
// currently animating away. Stop the
// animation right now -- it is not needed,
// and we can't wait any more on destroying
// the fragment.
View v = f.mAnimatingAway;
f.mAnimatingAway = null;
v.clearAnimation();
}
}
if (f.mAnimatingAway != null) {
// We are waiting for the fragment's view to finish
// animating away. Just make a note of the state
// the fragment now should move to once the animation
// is done.
f.mStateAfterAnimating = newState;
newState = Fragment.CREATED;
} else {
if (DEBUG) Log.v(TAG, "movefrom CREATED: " + f);
if (!f.mRetaining) {
f.performDestroy();
}
f.mCalled = false;
f.onDetach();
if (!f.mCalled) {
throw new SuperNotCalledException("Fragment " + f
+ " did not call through to super.onDetach()");
}
if (!keepActive) {
if (!f.mRetaining) {
makeInactive(f);
} else {
f.mHost = null;
f.mParentFragment = null;
f.mFragmentManager = null;
f.mChildFragmentManager = null;
}
}
}
}
}
}
f.mState = newState;
}
void moveToState(Fragment f) {
moveToState(f, mCurState, 0, 0, false);
}
void moveToState(int newState, boolean always) {
moveToState(newState, 0, 0, always);
}
void moveToState(int newState, int transit, int transitStyle, boolean always) {
if (mHost == null && newState != Fragment.INITIALIZING) {
throw new IllegalStateException("No host");
}
if (!always && mCurState == newState) {
return;
}
mCurState = newState;
if (mActive != null) {
boolean loadersRunning = false;
for (int i=0; i<mActive.size(); i++) {
Fragment f = mActive.get(i);
if (f != null) {
moveToState(f, newState, transit, transitStyle, false);
if (f.mLoaderManager != null) {
loadersRunning |= f.mLoaderManager.hasRunningLoaders();
}
}
}
if (!loadersRunning) {
startPendingDeferredFragments();
}
if (mNeedMenuInvalidate && mHost != null && mCurState == Fragment.RESUMED) {
mHost.onSupportInvalidateOptionsMenu();
mNeedMenuInvalidate = false;
}
}
}
void startPendingDeferredFragments() {
if (mActive == null) return;
for (int i=0; i<mActive.size(); i++) {
Fragment f = mActive.get(i);
if (f != null) {
performPendingDeferredStart(f);
}
}
}
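    // Descriptive note: active fragments get a stable index into mActive; indices freed by
    // makeInactive() are remembered in mAvailIndices and reused before the list is grown.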
void makeActive(Fragment f) {
if (f.mIndex >= 0) {
return;
}
if (mAvailIndices == null || mAvailIndices.size() <= 0) {
if (mActive == null) {
mActive = new ArrayList<Fragment>();
}
f.setIndex(mActive.size(), mParent);
mActive.add(f);
} else {
f.setIndex(mAvailIndices.remove(mAvailIndices.size()-1), mParent);
mActive.set(f.mIndex, f);
}
if (DEBUG) Log.v(TAG, "Allocated fragment index " + f);
}
void makeInactive(Fragment f) {
if (f.mIndex < 0) {
return;
}
if (DEBUG) Log.v(TAG, "Freeing fragment index " + f);
mActive.set(f.mIndex, null);
if (mAvailIndices == null) {
mAvailIndices = new ArrayList<Integer>();
}
mAvailIndices.add(f.mIndex);
mHost.inactivateFragment(f.mWho);
f.initState();
}
public void addFragment(Fragment fragment, boolean moveToStateNow) {
if (mAdded == null) {
mAdded = new ArrayList<Fragment>();
}
if (DEBUG) Log.v(TAG, "add: " + fragment);
makeActive(fragment);
if (!fragment.mDetached) {
if (mAdded.contains(fragment)) {
throw new IllegalStateException("Fragment already added: " + fragment);
}
mAdded.add(fragment);
fragment.mAdded = true;
fragment.mRemoving = false;
if (fragment.mHasMenu && fragment.mMenuVisible) {
mNeedMenuInvalidate = true;
}
if (moveToStateNow) {
moveToState(fragment);
}
}
}
public void removeFragment(Fragment fragment, int transition, int transitionStyle) {
if (DEBUG) Log.v(TAG, "remove: " + fragment + " nesting=" + fragment.mBackStackNesting);
final boolean inactive = !fragment.isInBackStack();
if (!fragment.mDetached || inactive) {
if (mAdded != null) {
mAdded.remove(fragment);
}
if (fragment.mHasMenu && fragment.mMenuVisible) {
mNeedMenuInvalidate = true;
}
fragment.mAdded = false;
fragment.mRemoving = true;
moveToState(fragment, inactive ? Fragment.INITIALIZING : Fragment.CREATED,
transition, transitionStyle, false);
}
}
public void hideFragment(Fragment fragment, int transition, int transitionStyle) {
if (DEBUG) Log.v(TAG, "hide: " + fragment);
if (!fragment.mHidden) {
fragment.mHidden = true;
if (fragment.mView != null) {
Animation anim = loadAnimation(fragment, transition, false,
transitionStyle);
if (anim != null) {
setHWLayerAnimListenerIfAlpha(fragment.mView, anim);
fragment.mView.startAnimation(anim);
}
fragment.mView.setVisibility(View.GONE);
}
if (fragment.mAdded && fragment.mHasMenu && fragment.mMenuVisible) {
mNeedMenuInvalidate = true;
}
fragment.onHiddenChanged(true);
}
}
public void showFragment(Fragment fragment, int transition, int transitionStyle) {
if (DEBUG) Log.v(TAG, "show: " + fragment);
if (fragment.mHidden) {
fragment.mHidden = false;
if (fragment.mView != null) {
Animation anim = loadAnimation(fragment, transition, true,
transitionStyle);
if (anim != null) {
setHWLayerAnimListenerIfAlpha(fragment.mView, anim);
fragment.mView.startAnimation(anim);
}
fragment.mView.setVisibility(View.VISIBLE);
}
if (fragment.mAdded && fragment.mHasMenu && fragment.mMenuVisible) {
mNeedMenuInvalidate = true;
}
fragment.onHiddenChanged(false);
}
}
public void detachFragment(Fragment fragment, int transition, int transitionStyle) {
if (DEBUG) Log.v(TAG, "detach: " + fragment);
if (!fragment.mDetached) {
fragment.mDetached = true;
if (fragment.mAdded) {
// We are not already in back stack, so need to remove the fragment.
if (mAdded != null) {
if (DEBUG) Log.v(TAG, "remove from detach: " + fragment);
mAdded.remove(fragment);
}
if (fragment.mHasMenu && fragment.mMenuVisible) {
mNeedMenuInvalidate = true;
}
fragment.mAdded = false;
moveToState(fragment, Fragment.CREATED, transition, transitionStyle, false);
}
}
}
public void attachFragment(Fragment fragment, int transition, int transitionStyle) {
if (DEBUG) Log.v(TAG, "attach: " + fragment);
if (fragment.mDetached) {
fragment.mDetached = false;
if (!fragment.mAdded) {
if (mAdded == null) {
mAdded = new ArrayList<Fragment>();
}
if (mAdded.contains(fragment)) {
throw new IllegalStateException("Fragment already added: " + fragment);
}
if (DEBUG) Log.v(TAG, "add from attach: " + fragment);
mAdded.add(fragment);
fragment.mAdded = true;
if (fragment.mHasMenu && fragment.mMenuVisible) {
mNeedMenuInvalidate = true;
}
moveToState(fragment, mCurState, transition, transitionStyle, false);
}
}
}
public Fragment findFragmentById(int id) {
if (mAdded != null) {
// First look through added fragments.
for (int i=mAdded.size()-1; i>=0; i--) {
Fragment f = mAdded.get(i);
if (f != null && f.mFragmentId == id) {
return f;
}
}
}
if (mActive != null) {
// Now for any known fragment.
for (int i=mActive.size()-1; i>=0; i--) {
Fragment f = mActive.get(i);
if (f != null && f.mFragmentId == id) {
return f;
}
}
}
return null;
}
public Fragment findFragmentByTag(String tag) {
if (mAdded != null && tag != null) {
// First look through added fragments.
for (int i=mAdded.size()-1; i>=0; i--) {
Fragment f = mAdded.get(i);
if (f != null && tag.equals(f.mTag)) {
return f;
}
}
}
if (mActive != null && tag != null) {
// Now for any known fragment.
for (int i=mActive.size()-1; i>=0; i--) {
Fragment f = mActive.get(i);
if (f != null && tag.equals(f.mTag)) {
return f;
}
}
}
return null;
}
public Fragment findFragmentByWho(String who) {
if (mActive != null && who != null) {
for (int i=mActive.size()-1; i>=0; i--) {
Fragment f = mActive.get(i);
if (f != null && (f=f.findFragmentByWho(who)) != null) {
return f;
}
}
}
return null;
}
private void checkStateLoss() {
if (mStateSaved) {
throw new IllegalStateException(
"Can not perform this action after onSaveInstanceState");
}
if (mNoTransactionsBecause != null) {
throw new IllegalStateException(
"Can not perform this action inside of " + mNoTransactionsBecause);
}
}
/**
* Adds an action to the queue of pending actions.
*
* @param action the action to add
* @param allowStateLoss whether to allow loss of state information
* @throws IllegalStateException if the activity has been destroyed
*/
public void enqueueAction(Runnable action, boolean allowStateLoss) {
if (!allowStateLoss) {
checkStateLoss();
}
synchronized (this) {
if (mDestroyed || mHost == null) {
throw new IllegalStateException("Activity has been destroyed");
}
if (mPendingActions == null) {
mPendingActions = new ArrayList<Runnable>();
}
mPendingActions.add(action);
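            // Only schedule a pass when this is the first pending action; any actions added
            // before the commit runnable runs are drained in the same pass.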
if (mPendingActions.size() == 1) {
mHost.getHandler().removeCallbacks(mExecCommit);
mHost.getHandler().post(mExecCommit);
}
}
}
public int allocBackStackIndex(BackStackRecord bse) {
synchronized (this) {
if (mAvailBackStackIndices == null || mAvailBackStackIndices.size() <= 0) {
if (mBackStackIndices == null) {
mBackStackIndices = new ArrayList<BackStackRecord>();
}
int index = mBackStackIndices.size();
if (DEBUG) Log.v(TAG, "Setting back stack index " + index + " to " + bse);
mBackStackIndices.add(bse);
return index;
} else {
int index = mAvailBackStackIndices.remove(mAvailBackStackIndices.size()-1);
if (DEBUG) Log.v(TAG, "Adding back stack index " + index + " with " + bse);
mBackStackIndices.set(index, bse);
return index;
}
}
}
public void setBackStackIndex(int index, BackStackRecord bse) {
synchronized (this) {
if (mBackStackIndices == null) {
mBackStackIndices = new ArrayList<BackStackRecord>();
}
int N = mBackStackIndices.size();
if (index < N) {
if (DEBUG) Log.v(TAG, "Setting back stack index " + index + " to " + bse);
mBackStackIndices.set(index, bse);
} else {
while (N < index) {
mBackStackIndices.add(null);
if (mAvailBackStackIndices == null) {
mAvailBackStackIndices = new ArrayList<Integer>();
}
if (DEBUG) Log.v(TAG, "Adding available back stack index " + N);
mAvailBackStackIndices.add(N);
N++;
}
if (DEBUG) Log.v(TAG, "Adding back stack index " + index + " with " + bse);
mBackStackIndices.add(bse);
}
}
}
public void freeBackStackIndex(int index) {
synchronized (this) {
mBackStackIndices.set(index, null);
if (mAvailBackStackIndices == null) {
mAvailBackStackIndices = new ArrayList<Integer>();
}
if (DEBUG) Log.v(TAG, "Freeing back stack index " + index);
mAvailBackStackIndices.add(index);
}
}
/**
* Only call from main thread!
*/
public boolean execPendingActions() {
if (mExecutingActions) {
throw new IllegalStateException("Recursive entry to executePendingTransactions");
}
if (Looper.myLooper() != mHost.getHandler().getLooper()) {
throw new IllegalStateException("Must be called from main thread of process");
}
boolean didSomething = false;
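        // Drain the queue in batches: copy the pending actions out while holding the lock,
        // then run them unlocked so that they are free to enqueue further actions.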
while (true) {
int numActions;
synchronized (this) {
if (mPendingActions == null || mPendingActions.size() == 0) {
break;
}
numActions = mPendingActions.size();
if (mTmpActions == null || mTmpActions.length < numActions) {
mTmpActions = new Runnable[numActions];
}
mPendingActions.toArray(mTmpActions);
mPendingActions.clear();
mHost.getHandler().removeCallbacks(mExecCommit);
}
mExecutingActions = true;
for (int i=0; i<numActions; i++) {
mTmpActions[i].run();
mTmpActions[i] = null;
}
mExecutingActions = false;
didSomething = true;
}
if (mHavePendingDeferredStart) {
boolean loadersRunning = false;
for (int i=0; i<mActive.size(); i++) {
Fragment f = mActive.get(i);
if (f != null && f.mLoaderManager != null) {
loadersRunning |= f.mLoaderManager.hasRunningLoaders();
}
}
if (!loadersRunning) {
mHavePendingDeferredStart = false;
startPendingDeferredFragments();
}
}
return didSomething;
}
void reportBackStackChanged() {
if (mBackStackChangeListeners != null) {
for (int i=0; i<mBackStackChangeListeners.size(); i++) {
mBackStackChangeListeners.get(i).onBackStackChanged();
}
}
}
void addBackStackState(BackStackRecord state) {
if (mBackStack == null) {
mBackStack = new ArrayList<BackStackRecord>();
}
mBackStack.add(state);
reportBackStackChanged();
}
@SuppressWarnings("unused")
boolean popBackStackState(Handler handler, String name, int id, int flags) {
if (mBackStack == null) {
return false;
}
if (name == null && id < 0 && (flags&POP_BACK_STACK_INCLUSIVE) == 0) {
int last = mBackStack.size()-1;
if (last < 0) {
return false;
}
final BackStackRecord bss = mBackStack.remove(last);
SparseArray<Fragment> firstOutFragments = new SparseArray<Fragment>();
SparseArray<Fragment> lastInFragments = new SparseArray<Fragment>();
bss.calculateBackFragments(firstOutFragments, lastInFragments);
bss.popFromBackStack(true, null, firstOutFragments, lastInFragments);
reportBackStackChanged();
} else {
int index = -1;
if (name != null || id >= 0) {
// If a name or ID is specified, look for that place in
// the stack.
index = mBackStack.size()-1;
while (index >= 0) {
BackStackRecord bss = mBackStack.get(index);
if (name != null && name.equals(bss.getName())) {
break;
}
if (id >= 0 && id == bss.mIndex) {
break;
}
index--;
}
if (index < 0) {
return false;
}
if ((flags&POP_BACK_STACK_INCLUSIVE) != 0) {
index--;
// Consume all following entries that match.
while (index >= 0) {
BackStackRecord bss = mBackStack.get(index);
if ((name != null && name.equals(bss.getName()))
|| (id >= 0 && id == bss.mIndex)) {
index--;
continue;
}
break;
}
}
}
if (index == mBackStack.size()-1) {
return false;
}
final ArrayList<BackStackRecord> states
= new ArrayList<BackStackRecord>();
for (int i=mBackStack.size()-1; i>index; i--) {
states.add(mBackStack.remove(i));
}
final int LAST = states.size()-1;
SparseArray<Fragment> firstOutFragments = new SparseArray<Fragment>();
SparseArray<Fragment> lastInFragments = new SparseArray<Fragment>();
for (int i=0; i<=LAST; i++) {
states.get(i).calculateBackFragments(firstOutFragments, lastInFragments);
}
BackStackRecord.TransitionState state = null;
for (int i=0; i<=LAST; i++) {
if (DEBUG) Log.v(TAG, "Popping back stack state: " + states.get(i));
state = states.get(i).popFromBackStack(i == LAST, state,
firstOutFragments, lastInFragments);
}
reportBackStackChanged();
}
return true;
}
ArrayList<Fragment> retainNonConfig() {
ArrayList<Fragment> fragments = null;
if (mActive != null) {
for (int i=0; i<mActive.size(); i++) {
Fragment f = mActive.get(i);
if (f != null && f.mRetainInstance) {
if (fragments == null) {
fragments = new ArrayList<Fragment>();
}
fragments.add(f);
f.mRetaining = true;
f.mTargetIndex = f.mTarget != null ? f.mTarget.mIndex : -1;
if (DEBUG) Log.v(TAG, "retainNonConfig: keeping retained " + f);
}
}
}
return fragments;
}
void saveFragmentViewState(Fragment f) {
if (f.mInnerView == null) {
return;
}
if (mStateArray == null) {
mStateArray = new SparseArray<Parcelable>();
} else {
mStateArray.clear();
}
f.mInnerView.saveHierarchyState(mStateArray);
if (mStateArray.size() > 0) {
f.mSavedViewState = mStateArray;
mStateArray = null;
}
}
Bundle saveFragmentBasicState(Fragment f) {
Bundle result = null;
if (mStateBundle == null) {
mStateBundle = new Bundle();
}
f.performSaveInstanceState(mStateBundle);
if (!mStateBundle.isEmpty()) {
result = mStateBundle;
mStateBundle = null;
}
if (f.mView != null) {
saveFragmentViewState(f);
}
if (f.mSavedViewState != null) {
if (result == null) {
result = new Bundle();
}
result.putSparseParcelableArray(
FragmentManagerImpl.VIEW_STATE_TAG, f.mSavedViewState);
}
if (!f.mUserVisibleHint) {
if (result == null) {
result = new Bundle();
}
// Only add this if it's not the default value
result.putBoolean(FragmentManagerImpl.USER_VISIBLE_HINT_TAG, f.mUserVisibleHint);
}
return result;
}
Parcelable saveAllState() {
// Make sure all pending operations have now been executed to get
        // our state up-to-date.
execPendingActions();
if (HONEYCOMB) {
// As of Honeycomb, we save state after pausing. Prior to that
// it is before pausing. With fragments this is an issue, since
// there are many things you may do after pausing but before
// stopping that change the fragment state. For those older
// devices, we will not at this point say that we have saved
// the state, so we will allow them to continue doing fragment
// transactions. This retains the same semantics as Honeycomb,
// though you do have the risk of losing the very most recent state
// if the process is killed... we'll live with that.
mStateSaved = true;
}
if (mActive == null || mActive.size() <= 0) {
return null;
}
// First collect all active fragments.
int N = mActive.size();
FragmentState[] active = new FragmentState[N];
boolean haveFragments = false;
for (int i=0; i<N; i++) {
Fragment f = mActive.get(i);
if (f != null) {
if (f.mIndex < 0) {
throwException(new IllegalStateException(
"Failure saving state: active " + f
+ " has cleared index: " + f.mIndex));
}
haveFragments = true;
FragmentState fs = new FragmentState(f);
active[i] = fs;
if (f.mState > Fragment.INITIALIZING && fs.mSavedFragmentState == null) {
fs.mSavedFragmentState = saveFragmentBasicState(f);
if (f.mTarget != null) {
if (f.mTarget.mIndex < 0) {
throwException(new IllegalStateException(
"Failure saving state: " + f
+ " has target not in fragment manager: " + f.mTarget));
}
if (fs.mSavedFragmentState == null) {
fs.mSavedFragmentState = new Bundle();
}
putFragment(fs.mSavedFragmentState,
FragmentManagerImpl.TARGET_STATE_TAG, f.mTarget);
if (f.mTargetRequestCode != 0) {
fs.mSavedFragmentState.putInt(
FragmentManagerImpl.TARGET_REQUEST_CODE_STATE_TAG,
f.mTargetRequestCode);
}
}
} else {
fs.mSavedFragmentState = f.mSavedFragmentState;
}
if (DEBUG) Log.v(TAG, "Saved state of " + f + ": "
+ fs.mSavedFragmentState);
}
}
if (!haveFragments) {
if (DEBUG) Log.v(TAG, "saveAllState: no fragments!");
return null;
}
int[] added = null;
BackStackState[] backStack = null;
// Build list of currently added fragments.
if (mAdded != null) {
N = mAdded.size();
if (N > 0) {
added = new int[N];
for (int i=0; i<N; i++) {
added[i] = mAdded.get(i).mIndex;
if (added[i] < 0) {
throwException(new IllegalStateException(
"Failure saving state: active " + mAdded.get(i)
+ " has cleared index: " + added[i]));
}
if (DEBUG) Log.v(TAG, "saveAllState: adding fragment #" + i
+ ": " + mAdded.get(i));
}
}
}
// Now save back stack.
if (mBackStack != null) {
N = mBackStack.size();
if (N > 0) {
backStack = new BackStackState[N];
for (int i=0; i<N; i++) {
backStack[i] = new BackStackState(mBackStack.get(i));
if (DEBUG) Log.v(TAG, "saveAllState: adding back stack #" + i
+ ": " + mBackStack.get(i));
}
}
}
FragmentManagerState fms = new FragmentManagerState();
fms.mActive = active;
fms.mAdded = added;
fms.mBackStack = backStack;
return fms;
}
void restoreAllState(Parcelable state, List<Fragment> nonConfig) {
// If there is no saved state at all, then there can not be
// any nonConfig fragments either, so that is that.
if (state == null) return;
FragmentManagerState fms = (FragmentManagerState)state;
if (fms.mActive == null) return;
// First re-attach any non-config instances we are retaining back
// to their saved state, so we don't try to instantiate them again.
if (nonConfig != null) {
for (int i=0; i<nonConfig.size(); i++) {
Fragment f = nonConfig.get(i);
if (DEBUG) Log.v(TAG, "restoreAllState: re-attaching retained " + f);
FragmentState fs = fms.mActive[f.mIndex];
fs.mInstance = f;
f.mSavedViewState = null;
f.mBackStackNesting = 0;
f.mInLayout = false;
f.mAdded = false;
f.mTarget = null;
if (fs.mSavedFragmentState != null) {
fs.mSavedFragmentState.setClassLoader(mHost.getContext().getClassLoader());
f.mSavedViewState = fs.mSavedFragmentState.getSparseParcelableArray(
FragmentManagerImpl.VIEW_STATE_TAG);
f.mSavedFragmentState = fs.mSavedFragmentState;
}
}
}
// Build the full list of active fragments, instantiating them from
// their saved state.
mActive = new ArrayList<Fragment>(fms.mActive.length);
if (mAvailIndices != null) {
mAvailIndices.clear();
}
for (int i=0; i<fms.mActive.length; i++) {
FragmentState fs = fms.mActive[i];
if (fs != null) {
Fragment f = fs.instantiate(mHost, mParent);
if (DEBUG) Log.v(TAG, "restoreAllState: active #" + i + ": " + f);
mActive.add(f);
// Now that the fragment is instantiated (or came from being
// retained above), clear mInstance in case we end up re-restoring
// from this FragmentState again.
fs.mInstance = null;
} else {
mActive.add(null);
if (mAvailIndices == null) {
mAvailIndices = new ArrayList<Integer>();
}
if (DEBUG) Log.v(TAG, "restoreAllState: avail #" + i);
mAvailIndices.add(i);
}
}
// Update the target of all retained fragments.
if (nonConfig != null) {
for (int i=0; i<nonConfig.size(); i++) {
Fragment f = nonConfig.get(i);
if (f.mTargetIndex >= 0) {
if (f.mTargetIndex < mActive.size()) {
f.mTarget = mActive.get(f.mTargetIndex);
} else {
Log.w(TAG, "Re-attaching retained fragment " + f
+ " target no longer exists: " + f.mTargetIndex);
f.mTarget = null;
}
}
}
}
// Build the list of currently added fragments.
if (fms.mAdded != null) {
mAdded = new ArrayList<Fragment>(fms.mAdded.length);
for (int i=0; i<fms.mAdded.length; i++) {
Fragment f = mActive.get(fms.mAdded[i]);
if (f == null) {
throwException(new IllegalStateException(
"No instantiated fragment for index #" + fms.mAdded[i]));
}
f.mAdded = true;
if (DEBUG) Log.v(TAG, "restoreAllState: added #" + i + ": " + f);
if (mAdded.contains(f)) {
throw new IllegalStateException("Already added!");
}
mAdded.add(f);
}
} else {
mAdded = null;
}
// Build the back stack.
if (fms.mBackStack != null) {
mBackStack = new ArrayList<BackStackRecord>(fms.mBackStack.length);
for (int i=0; i<fms.mBackStack.length; i++) {
BackStackRecord bse = fms.mBackStack[i].instantiate(this);
if (DEBUG) {
Log.v(TAG, "restoreAllState: back stack #" + i
+ " (index " + bse.mIndex + "): " + bse);
LogWriter logw = new LogWriter(TAG);
PrintWriter pw = new PrintWriter(logw);
bse.dump(" ", pw, false);
}
mBackStack.add(bse);
if (bse.mIndex >= 0) {
setBackStackIndex(bse.mIndex, bse);
}
}
} else {
mBackStack = null;
}
}
public void attachController(FragmentHostCallback host,
FragmentContainer container, Fragment parent) {
if (mHost != null) throw new IllegalStateException("Already attached");
mHost = host;
mContainer = container;
mParent = parent;
}
public void noteStateNotSaved() {
mStateSaved = false;
}
public void dispatchCreate() {
mStateSaved = false;
moveToState(Fragment.CREATED, false);
}
public void dispatchActivityCreated() {
mStateSaved = false;
moveToState(Fragment.ACTIVITY_CREATED, false);
}
public void dispatchStart() {
mStateSaved = false;
moveToState(Fragment.STARTED, false);
}
public void dispatchResume() {
mStateSaved = false;
moveToState(Fragment.RESUMED, false);
}
public void dispatchPause() {
moveToState(Fragment.STARTED, false);
}
public void dispatchStop() {
// See saveAllState() for the explanation of this. We do this for
// all platform versions, to keep our behavior more consistent between
// them.
mStateSaved = true;
moveToState(Fragment.STOPPED, false);
}
public void dispatchReallyStop() {
moveToState(Fragment.ACTIVITY_CREATED, false);
}
public void dispatchDestroyView() {
moveToState(Fragment.CREATED, false);
}
public void dispatchDestroy() {
mDestroyed = true;
execPendingActions();
moveToState(Fragment.INITIALIZING, false);
mHost = null;
mContainer = null;
mParent = null;
}
public void dispatchConfigurationChanged(Configuration newConfig) {
if (mAdded != null) {
for (int i=0; i<mAdded.size(); i++) {
Fragment f = mAdded.get(i);
if (f != null) {
f.performConfigurationChanged(newConfig);
}
}
}
}
public void dispatchLowMemory() {
if (mAdded != null) {
for (int i=0; i<mAdded.size(); i++) {
Fragment f = mAdded.get(i);
if (f != null) {
f.performLowMemory();
}
}
}
}
public boolean dispatchCreateOptionsMenu(Menu menu, MenuInflater inflater) {
boolean show = false;
ArrayList<Fragment> newMenus = null;
if (mAdded != null) {
for (int i=0; i<mAdded.size(); i++) {
Fragment f = mAdded.get(i);
if (f != null) {
if (f.performCreateOptionsMenu(menu, inflater)) {
show = true;
if (newMenus == null) {
newMenus = new ArrayList<Fragment>();
}
newMenus.add(f);
}
}
}
}
if (mCreatedMenus != null) {
for (int i=0; i<mCreatedMenus.size(); i++) {
Fragment f = mCreatedMenus.get(i);
if (newMenus == null || !newMenus.contains(f)) {
f.onDestroyOptionsMenu();
}
}
}
mCreatedMenus = newMenus;
return show;
}
public boolean dispatchPrepareOptionsMenu(Menu menu) {
boolean show = false;
if (mAdded != null) {
for (int i=0; i<mAdded.size(); i++) {
Fragment f = mAdded.get(i);
if (f != null) {
if (f.performPrepareOptionsMenu(menu)) {
show = true;
}
}
}
}
return show;
}
public boolean dispatchOptionsItemSelected(MenuItem item) {
if (mAdded != null) {
for (int i=0; i<mAdded.size(); i++) {
Fragment f = mAdded.get(i);
if (f != null) {
if (f.performOptionsItemSelected(item)) {
return true;
}
}
}
}
return false;
}
public boolean dispatchContextItemSelected(MenuItem item) {
if (mAdded != null) {
for (int i=0; i<mAdded.size(); i++) {
Fragment f = mAdded.get(i);
if (f != null) {
if (f.performContextItemSelected(item)) {
return true;
}
}
}
}
return false;
}
public void dispatchOptionsMenuClosed(Menu menu) {
if (mAdded != null) {
for (int i=0; i<mAdded.size(); i++) {
Fragment f = mAdded.get(i);
if (f != null) {
f.performOptionsMenuClosed(menu);
}
}
}
}
public static int reverseTransit(int transit) {
int rev = 0;
switch (transit) {
case FragmentTransaction.TRANSIT_FRAGMENT_OPEN:
rev = FragmentTransaction.TRANSIT_FRAGMENT_CLOSE;
break;
case FragmentTransaction.TRANSIT_FRAGMENT_CLOSE:
rev = FragmentTransaction.TRANSIT_FRAGMENT_OPEN;
break;
case FragmentTransaction.TRANSIT_FRAGMENT_FADE:
rev = FragmentTransaction.TRANSIT_FRAGMENT_FADE;
break;
}
return rev;
}
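    // Internal animation "style indices"; loadAnimation() maps a transit to one of these
    // via transitToStyleIndex() and builds the corresponding built-in animation.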
public static final int ANIM_STYLE_OPEN_ENTER = 1;
public static final int ANIM_STYLE_OPEN_EXIT = 2;
public static final int ANIM_STYLE_CLOSE_ENTER = 3;
public static final int ANIM_STYLE_CLOSE_EXIT = 4;
public static final int ANIM_STYLE_FADE_ENTER = 5;
public static final int ANIM_STYLE_FADE_EXIT = 6;
public static int transitToStyleIndex(int transit, boolean enter) {
int animAttr = -1;
switch (transit) {
case FragmentTransaction.TRANSIT_FRAGMENT_OPEN:
animAttr = enter ? ANIM_STYLE_OPEN_ENTER : ANIM_STYLE_OPEN_EXIT;
break;
case FragmentTransaction.TRANSIT_FRAGMENT_CLOSE:
animAttr = enter ? ANIM_STYLE_CLOSE_ENTER : ANIM_STYLE_CLOSE_EXIT;
break;
case FragmentTransaction.TRANSIT_FRAGMENT_FADE:
animAttr = enter ? ANIM_STYLE_FADE_ENTER : ANIM_STYLE_FADE_EXIT;
break;
}
return animAttr;
}
@Override
public View onCreateView(View parent, String name, Context context, AttributeSet attrs) {
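        // Only the <fragment> tag is handled here; returning null lets the inflater fall
        // back to its normal view creation.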
if (!"fragment".equals(name)) {
return null;
}
String fname = attrs.getAttributeValue(null, "class");
TypedArray a = context.obtainStyledAttributes(attrs, FragmentTag.Fragment);
if (fname == null) {
fname = a.getString(FragmentTag.Fragment_name);
}
int id = a.getResourceId(FragmentTag.Fragment_id, View.NO_ID);
String tag = a.getString(FragmentTag.Fragment_tag);
a.recycle();
if (!Fragment.isSupportFragmentClass(mHost.getContext(), fname)) {
// Invalid support lib fragment; let the device's framework handle it.
// This will allow android.app.Fragments to do the right thing.
return null;
}
int containerId = parent != null ? parent.getId() : 0;
if (containerId == View.NO_ID && id == View.NO_ID && tag == null) {
throw new IllegalArgumentException(attrs.getPositionDescription()
+ ": Must specify unique android:id, android:tag, or have a parent with an id for " + fname);
}
// If we restored from a previous state, we may already have
// instantiated this fragment from the state and should use
// that instance instead of making a new one.
Fragment fragment = id != View.NO_ID ? findFragmentById(id) : null;
if (fragment == null && tag != null) {
fragment = findFragmentByTag(tag);
}
if (fragment == null && containerId != View.NO_ID) {
fragment = findFragmentById(containerId);
}
if (FragmentManagerImpl.DEBUG) Log.v(TAG, "onCreateView: id=0x"
+ Integer.toHexString(id) + " fname=" + fname
+ " existing=" + fragment);
if (fragment == null) {
fragment = Fragment.instantiate(context, fname);
fragment.mFromLayout = true;
fragment.mFragmentId = id != 0 ? id : containerId;
fragment.mContainerId = containerId;
fragment.mTag = tag;
fragment.mInLayout = true;
fragment.mFragmentManager = this;
fragment.mHost = mHost;
fragment.onInflate(mHost.getContext(), attrs, fragment.mSavedFragmentState);
addFragment(fragment, true);
} else if (fragment.mInLayout) {
// A fragment already exists and it is not one we restored from
// previous state.
throw new IllegalArgumentException(attrs.getPositionDescription()
+ ": Duplicate id 0x" + Integer.toHexString(id)
+ ", tag " + tag + ", or parent id 0x" + Integer.toHexString(containerId)
+ " with another fragment for " + fname);
} else {
// This fragment was retained from a previous instance; get it
// going now.
fragment.mInLayout = true;
// If this fragment is newly instantiated (either right now, or
// from last saved state), then give it the attributes to
// initialize itself.
if (!fragment.mRetaining) {
fragment.onInflate(mHost.getContext(), attrs, fragment.mSavedFragmentState);
}
}
// If we haven't finished entering the CREATED state ourselves yet,
// push the inflated child fragment along.
if (mCurState < Fragment.CREATED && fragment.mFromLayout) {
moveToState(fragment, Fragment.CREATED, 0, 0, false);
} else {
moveToState(fragment);
}
if (fragment.mView == null) {
throw new IllegalStateException("Fragment " + fname
+ " did not create a view.");
}
if (id != 0) {
fragment.mView.setId(id);
}
if (fragment.mView.getTag() == null) {
fragment.mView.setTag(tag);
}
return fragment.mView;
}
LayoutInflaterFactory getLayoutInflaterFactory() {
return this;
}
static class FragmentTag {
public static final int[] Fragment = {
0x01010003, 0x010100d0, 0x010100d1
};
public static final int Fragment_id = 1;
public static final int Fragment_name = 0;
public static final int Fragment_tag = 2;
}
}
| v4/java/android/support/v4/app/FragmentManager.java | /*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.support.v4.app;
import android.content.Context;
import android.content.res.Configuration;
import android.content.res.TypedArray;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.os.Parcel;
import android.os.Parcelable;
import android.support.annotation.CallSuper;
import android.support.annotation.IdRes;
import android.support.annotation.StringRes;
import android.support.v4.util.DebugUtils;
import android.support.v4.util.LogWriter;
import android.support.v4.view.LayoutInflaterFactory;
import android.support.v4.view.ViewCompat;
import android.util.AttributeSet;
import android.util.Log;
import android.util.SparseArray;
import android.view.animation.AccelerateInterpolator;
import android.view.animation.AlphaAnimation;
import android.view.animation.Animation;
import android.view.animation.AnimationSet;
import android.view.animation.AnimationUtils;
import android.view.animation.DecelerateInterpolator;
import android.view.animation.Interpolator;
import android.view.animation.ScaleAnimation;
import android.view.animation.Animation.AnimationListener;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import java.io.FileDescriptor;
import java.io.PrintWriter;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* Static library support version of the framework's {@link android.app.FragmentManager}.
* Used to write apps that run on platforms prior to Android 3.0. When running
* on Android 3.0 or above, this implementation is still used; it does not try
* to switch to the framework's implementation. See the framework {@link FragmentManager}
* documentation for a class overview.
*
* <p>Your activity must derive from {@link FragmentActivity} to use this. From such an activity,
* you can acquire the {@link FragmentManager} by calling
* {@link FragmentActivity#getSupportFragmentManager}.
*/
public abstract class FragmentManager {
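    // Illustrative usage (a minimal sketch, not part of this class): from a
    // FragmentActivity the manager is obtained with getSupportFragmentManager() and
    // changes are applied through a FragmentTransaction ("R.id.container" and
    // "MyFragment" are hypothetical):
    //
    //   FragmentManager fm = getSupportFragmentManager();
    //   fm.beginTransaction()
    //           .replace(R.id.container, new MyFragment())
    //           .addToBackStack(null)
    //           .commit();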
/**
* Representation of an entry on the fragment back stack, as created
* with {@link FragmentTransaction#addToBackStack(String)
* FragmentTransaction.addToBackStack()}. Entries can later be
* retrieved with {@link FragmentManager#getBackStackEntryAt(int)
* FragmentManager.getBackStackEntry()}.
*
* <p>Note that you should never hold on to a BackStackEntry object;
* the identifier as returned by {@link #getId} is the only thing that
* will be persisted across activity instances.
*/
public interface BackStackEntry {
/**
* Return the unique identifier for the entry. This is the only
* representation of the entry that will persist across activity
* instances.
*/
public int getId();
/**
* Get the name that was supplied to
* {@link FragmentTransaction#addToBackStack(String)
* FragmentTransaction.addToBackStack(String)} when creating this entry.
*/
public String getName();
/**
* Return the full bread crumb title resource identifier for the entry,
* or 0 if it does not have one.
*/
@StringRes
public int getBreadCrumbTitleRes();
/**
* Return the short bread crumb title resource identifier for the entry,
* or 0 if it does not have one.
*/
@StringRes
public int getBreadCrumbShortTitleRes();
/**
* Return the full bread crumb title for the entry, or null if it
* does not have one.
*/
public CharSequence getBreadCrumbTitle();
/**
* Return the short bread crumb title for the entry, or null if it
* does not have one.
*/
public CharSequence getBreadCrumbShortTitle();
}
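    // Illustrative sketch: since BackStackEntry objects should not be held on to, the
    // back stack is typically re-read through the manager when needed ("fm" is assumed
    // to be a FragmentManager obtained elsewhere):
    //
    //   for (int i = 0; i < fm.getBackStackEntryCount(); i++) {
    //       FragmentManager.BackStackEntry entry = fm.getBackStackEntryAt(i);
    //       Log.d("BackStack", entry.getId() + ": " + entry.getName());
    //   }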
/**
* Interface to watch for changes to the back stack.
*/
public interface OnBackStackChangedListener {
/**
* Called whenever the contents of the back stack change.
*/
public void onBackStackChanged();
}
/**
* Start a series of edit operations on the Fragments associated with
* this FragmentManager.
*
* <p>Note: A fragment transaction can only be created/committed prior
* to an activity saving its state. If you try to commit a transaction
* after {@link FragmentActivity#onSaveInstanceState FragmentActivity.onSaveInstanceState()}
* (and prior to a following {@link FragmentActivity#onStart FragmentActivity.onStart}
     * or {@link FragmentActivity#onResume FragmentActivity.onResume()}), you will get an error.
* This is because the framework takes care of saving your current fragments
* in the state, and if changes are made after the state is saved then they
* will be lost.</p>
*/
public abstract FragmentTransaction beginTransaction();
/** @hide -- remove once prebuilts are in. */
@Deprecated
public FragmentTransaction openTransaction() {
return beginTransaction();
}
/**
* After a {@link FragmentTransaction} is committed with
* {@link FragmentTransaction#commit FragmentTransaction.commit()}, it
* is scheduled to be executed asynchronously on the process's main thread.
     * If you want to immediately execute any such pending operations, you
* can call this function (only from the main thread) to do so. Note that
* all callbacks and other related behavior will be done from within this
* call, so be careful about where this is called from.
*
* @return Returns true if there were any pending transactions to be
* executed.
*/
public abstract boolean executePendingTransactions();
/**
* Finds a fragment that was identified by the given id either when inflated
* from XML or as the container ID when added in a transaction. This first
* searches through fragments that are currently added to the manager's
* activity; if no such fragment is found, then all fragments currently
* on the back stack associated with this ID are searched.
* @return The fragment if found or null otherwise.
*/
public abstract Fragment findFragmentById(@IdRes int id);
/**
* Finds a fragment that was identified by the given tag either when inflated
* from XML or as supplied when added in a transaction. This first
* searches through fragments that are currently added to the manager's
* activity; if no such fragment is found, then all fragments currently
* on the back stack are searched.
* @return The fragment if found or null otherwise.
*/
public abstract Fragment findFragmentByTag(String tag);
/**
* Flag for {@link #popBackStack(String, int)}
* and {@link #popBackStack(int, int)}: If set, and the name or ID of
* a back stack entry has been supplied, then all matching entries will
* be consumed until one that doesn't match is found or the bottom of
* the stack is reached. Otherwise, all entries up to but not including that entry
* will be removed.
*/
public static final int POP_BACK_STACK_INCLUSIVE = 1<<0;
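    // Illustrative sketch ("checkout" is a hypothetical back stack entry name):
    //
    //   fm.popBackStack("checkout", 0);
    //       // pops everything above the "checkout" entry, leaving it on the stack
    //   fm.popBackStack("checkout", FragmentManager.POP_BACK_STACK_INCLUSIVE);
    //       // additionally pops the "checkout" entry itself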
/**
* Pop the top state off the back stack. Returns true if there was one
* to pop, else false. This function is asynchronous -- it enqueues the
* request to pop, but the action will not be performed until the application
* returns to its event loop.
*/
public abstract void popBackStack();
/**
* Like {@link #popBackStack()}, but performs the operation immediately
* inside of the call. This is like calling {@link #executePendingTransactions()}
* afterwards.
* @return Returns true if there was something popped, else false.
*/
public abstract boolean popBackStackImmediate();
/**
* Pop the last fragment transition from the manager's fragment
* back stack. If there is nothing to pop, false is returned.
* This function is asynchronous -- it enqueues the
* request to pop, but the action will not be performed until the application
* returns to its event loop.
*
* @param name If non-null, this is the name of a previous back state
* to look for; if found, all states up to that state will be popped. The
* {@link #POP_BACK_STACK_INCLUSIVE} flag can be used to control whether
* the named state itself is popped. If null, only the top state is popped.
* @param flags Either 0 or {@link #POP_BACK_STACK_INCLUSIVE}.
*/
public abstract void popBackStack(String name, int flags);
/**
* Like {@link #popBackStack(String, int)}, but performs the operation immediately
* inside of the call. This is like calling {@link #executePendingTransactions()}
* afterwards.
* @return Returns true if there was something popped, else false.
*/
public abstract boolean popBackStackImmediate(String name, int flags);
/**
* Pop all back stack states up to the one with the given identifier.
* This function is asynchronous -- it enqueues the
* request to pop, but the action will not be performed until the application
* returns to its event loop.
*
     * @param id Identifier of the state to be popped. If no matching identifier
     * exists, nothing is popped.
* The identifier is the number returned by
* {@link FragmentTransaction#commit() FragmentTransaction.commit()}. The
* {@link #POP_BACK_STACK_INCLUSIVE} flag can be used to control whether
* the named state itself is popped.
* @param flags Either 0 or {@link #POP_BACK_STACK_INCLUSIVE}.
*/
public abstract void popBackStack(int id, int flags);
/**
* Like {@link #popBackStack(int, int)}, but performs the operation immediately
* inside of the call. This is like calling {@link #executePendingTransactions()}
* afterwards.
* @return Returns true if there was something popped, else false.
*/
public abstract boolean popBackStackImmediate(int id, int flags);
/**
* Return the number of entries currently in the back stack.
*/
public abstract int getBackStackEntryCount();
/**
* Return the BackStackEntry at index <var>index</var> in the back stack;
* entries start at index 0, which is the bottom of the stack.
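* <p>A small sketch of walking the stack from bottom to top ({@code fm} is a
* hypothetical FragmentManager reference, the log tag is arbitrary):
* <pre>{@code
* for (int i = 0; i < fm.getBackStackEntryCount(); i++) {
*     Log.d("BackStack", "entry " + i + ": " + fm.getBackStackEntryAt(i).getName());
* }
* }</pre>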
*/
public abstract BackStackEntry getBackStackEntryAt(int index);
/**
* Add a new listener for changes to the fragment back stack.
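* <p>For illustration, a typical anonymous listener ({@code fm} is a
* hypothetical FragmentManager reference, the log tag is arbitrary):
* <pre>{@code
* fm.addOnBackStackChangedListener(new FragmentManager.OnBackStackChangedListener() {
*     public void onBackStackChanged() {
*         Log.d("BackStack", "count=" + fm.getBackStackEntryCount());
*     }
* });
* }</pre>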
*/
public abstract void addOnBackStackChangedListener(OnBackStackChangedListener listener);
/**
* Remove a listener that was previously added with
* {@link #addOnBackStackChangedListener(OnBackStackChangedListener)}.
*/
public abstract void removeOnBackStackChangedListener(OnBackStackChangedListener listener);
/**
* Put a reference to a fragment in a Bundle. This Bundle can be
* persisted as saved state, and when later restoring
* {@link #getFragment(Bundle, String)} will return the current
* instance of the same fragment.
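* <p>A minimal sketch, assuming a host activity that keeps the fragment in a
* field {@code mWorkFragment} (the field and key names are illustrative only):
* <pre>{@code
* // In the host activity's onSaveInstanceState(Bundle outState):
* getSupportFragmentManager().putFragment(outState, "workFragment", mWorkFragment);
* // Later, when restoring (e.g. in onCreate(Bundle savedInstanceState)):
* mWorkFragment = getSupportFragmentManager().getFragment(savedInstanceState, "workFragment");
* }</pre>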
*
* @param bundle The bundle in which to put the fragment reference.
* @param key The name of the entry in the bundle.
* @param fragment The Fragment whose reference is to be stored.
*/
public abstract void putFragment(Bundle bundle, String key, Fragment fragment);
/**
* Retrieve the current Fragment instance for a reference previously
* placed with {@link #putFragment(Bundle, String, Fragment)}.
*
* @param bundle The bundle from which to retrieve the fragment reference.
* @param key The name of the entry in the bundle.
* @return Returns the current Fragment instance that is associated with
* the given reference.
*/
public abstract Fragment getFragment(Bundle bundle, String key);
/**
* Get a list of all fragments that have been added to the fragment manager.
*
* @return The list of all fragments or null if none.
* @hide
*/
public abstract List<Fragment> getFragments();
/**
* Save the current instance state of the given Fragment. This can be
* used later when creating a new instance of the Fragment and adding
* it to the fragment manager, to have it create itself to match the
* current state returned here. Note that there are limits on how
* this can be used:
*
* <ul>
* <li>The Fragment must currently be attached to the FragmentManager.
* <li>A new Fragment created using this saved state must be the same class
* type as the Fragment it was created from.
* <li>The saved state can not contain dependencies on other fragments --
* that is, it can't use {@link #putFragment(Bundle, String, Fragment)} to
* store a fragment reference because that reference may not be valid when
* this saved state is later used. Likewise the Fragment's target and
* result code are not included in this state.
* </ul>
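*
* <p>A brief sketch of the intended round trip ({@code oldFragment} and
* {@code newFragment} are hypothetical references of the same fragment class):
* <pre>{@code
* Fragment.SavedState state = fm.saveFragmentInstanceState(oldFragment);
* newFragment.setInitialSavedState(state);
* // newFragment can now be added via a transaction and will restore that state.
* }</pre>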
*
* @param f The Fragment whose state is to be saved.
* @return The generated state. This will be null if there was no
* interesting state created by the fragment.
*/
public abstract Fragment.SavedState saveFragmentInstanceState(Fragment f);
/**
* Returns true if the final {@link android.app.Activity#onDestroy() Activity.onDestroy()}
* call has been made on the FragmentManager's Activity, so this instance is now dead.
*/
public abstract boolean isDestroyed();
/**
* Print the FragmentManager's state into the given stream.
*
* @param prefix Text to print at the front of each line.
* @param fd The raw file descriptor that the dump is being sent to.
* @param writer A PrintWriter to which the dump is to be sent.
* @param args Additional arguments to the dump request.
*/
public abstract void dump(String prefix, FileDescriptor fd, PrintWriter writer, String[] args);
/**
* Control whether the framework's internal fragment manager debugging
* logs are turned on. If enabled, you will see output in logcat as
* the framework performs fragment operations.
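* <p>A minimal sketch -- typically guarded so it only applies to debug builds
* ({@code BuildConfig.DEBUG} is the usual, but not required, guard):
* <pre>{@code
* FragmentManager.enableDebugLogging(BuildConfig.DEBUG);
* }</pre>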
*/
public static void enableDebugLogging(boolean enabled) {
FragmentManagerImpl.DEBUG = enabled;
}
}
final class FragmentManagerState implements Parcelable {
FragmentState[] mActive;
int[] mAdded;
BackStackState[] mBackStack;
public FragmentManagerState() {
}
public FragmentManagerState(Parcel in) {
mActive = in.createTypedArray(FragmentState.CREATOR);
mAdded = in.createIntArray();
mBackStack = in.createTypedArray(BackStackState.CREATOR);
}
public int describeContents() {
return 0;
}
public void writeToParcel(Parcel dest, int flags) {
dest.writeTypedArray(mActive, flags);
dest.writeIntArray(mAdded);
dest.writeTypedArray(mBackStack, flags);
}
public static final Parcelable.Creator<FragmentManagerState> CREATOR
= new Parcelable.Creator<FragmentManagerState>() {
public FragmentManagerState createFromParcel(Parcel in) {
return new FragmentManagerState(in);
}
public FragmentManagerState[] newArray(int size) {
return new FragmentManagerState[size];
}
};
}
/**
* Container for fragments associated with an activity.
*/
final class FragmentManagerImpl extends FragmentManager implements LayoutInflaterFactory {
static boolean DEBUG = false;
static final String TAG = "FragmentManager";
static final boolean HONEYCOMB = android.os.Build.VERSION.SDK_INT >= 11;
static final String TARGET_REQUEST_CODE_STATE_TAG = "android:target_req_state";
static final String TARGET_STATE_TAG = "android:target_state";
static final String VIEW_STATE_TAG = "android:view_state";
static final String USER_VISIBLE_HINT_TAG = "android:user_visible_hint";
static class AnimateOnHWLayerIfNeededListener implements AnimationListener {
private AnimationListener mOrignalListener = null;
private boolean mShouldRunOnHWLayer = false;
private View mView = null;
public AnimateOnHWLayerIfNeededListener(final View v, Animation anim) {
if (v == null || anim == null) {
return;
}
mView = v;
}
public AnimateOnHWLayerIfNeededListener(final View v, Animation anim,
AnimationListener listener) {
if (v == null || anim == null) {
return;
}
mOrignalListener = listener;
mView = v;
}
@Override
@CallSuper
public void onAnimationStart(Animation animation) {
if (mView != null) {
mShouldRunOnHWLayer = shouldRunOnHWLayer(mView, animation);
if (mShouldRunOnHWLayer) {
mView.post(new Runnable() {
@Override
public void run() {
ViewCompat.setLayerType(mView, ViewCompat.LAYER_TYPE_HARDWARE, null);
}
});
}
}
if (mOrignalListener != null) {
mOrignalListener.onAnimationStart(animation);
}
}
@Override
@CallSuper
public void onAnimationEnd(Animation animation) {
if (mView != null && mShouldRunOnHWLayer) {
mView.post(new Runnable() {
@Override
public void run() {
ViewCompat.setLayerType(mView, ViewCompat.LAYER_TYPE_NONE, null);
}
});
}
if (mOrignalListener != null) {
mOrignalListener.onAnimationEnd(animation);
}
}
@Override
public void onAnimationRepeat(Animation animation) {
if (mOrignalListener != null) {
mOrignalListener.onAnimationRepeat(animation);
}
}
}
ArrayList<Runnable> mPendingActions;
Runnable[] mTmpActions;
boolean mExecutingActions;
ArrayList<Fragment> mActive;
ArrayList<Fragment> mAdded;
ArrayList<Integer> mAvailIndices;
ArrayList<BackStackRecord> mBackStack;
ArrayList<Fragment> mCreatedMenus;
// Must be accessed while locked.
ArrayList<BackStackRecord> mBackStackIndices;
ArrayList<Integer> mAvailBackStackIndices;
ArrayList<OnBackStackChangedListener> mBackStackChangeListeners;
int mCurState = Fragment.INITIALIZING;
FragmentHostCallback mHost;
FragmentController mController;
FragmentContainer mContainer;
Fragment mParent;
static Field sAnimationListenerField = null;
boolean mNeedMenuInvalidate;
boolean mStateSaved;
boolean mDestroyed;
String mNoTransactionsBecause;
boolean mHavePendingDeferredStart;
// Temporary vars for state save and restore.
Bundle mStateBundle = null;
SparseArray<Parcelable> mStateArray = null;
Runnable mExecCommit = new Runnable() {
@Override
public void run() {
execPendingActions();
}
};
static boolean modifiesAlpha(Animation anim) {
if (anim instanceof AlphaAnimation) {
return true;
} else if (anim instanceof AnimationSet) {
List<Animation> anims = ((AnimationSet) anim).getAnimations();
for (int i = 0; i < anims.size(); i++) {
if (anims.get(i) instanceof AlphaAnimation) {
return true;
}
}
}
return false;
}
static boolean shouldRunOnHWLayer(View v, Animation anim) {
return ViewCompat.getLayerType(v) == ViewCompat.LAYER_TYPE_NONE
&& ViewCompat.hasOverlappingRendering(v)
&& modifiesAlpha(anim);
}
private void throwException(RuntimeException ex) {
Log.e(TAG, ex.getMessage());
Log.e(TAG, "Activity state:");
LogWriter logw = new LogWriter(TAG);
PrintWriter pw = new PrintWriter(logw);
if (mHost != null) {
try {
mHost.onDump(" ", null, pw, new String[] { });
} catch (Exception e) {
Log.e(TAG, "Failed dumping state", e);
}
} else {
try {
dump(" ", null, pw, new String[] { });
} catch (Exception e) {
Log.e(TAG, "Failed dumping state", e);
}
}
throw ex;
}
@Override
public FragmentTransaction beginTransaction() {
return new BackStackRecord(this);
}
@Override
public boolean executePendingTransactions() {
return execPendingActions();
}
@Override
public void popBackStack() {
enqueueAction(new Runnable() {
@Override public void run() {
popBackStackState(mHost.getHandler(), null, -1, 0);
}
}, false);
}
@Override
public boolean popBackStackImmediate() {
checkStateLoss();
executePendingTransactions();
return popBackStackState(mHost.getHandler(), null, -1, 0);
}
@Override
public void popBackStack(final String name, final int flags) {
enqueueAction(new Runnable() {
@Override public void run() {
popBackStackState(mHost.getHandler(), name, -1, flags);
}
}, false);
}
@Override
public boolean popBackStackImmediate(String name, int flags) {
checkStateLoss();
executePendingTransactions();
return popBackStackState(mHost.getHandler(), name, -1, flags);
}
@Override
public void popBackStack(final int id, final int flags) {
if (id < 0) {
throw new IllegalArgumentException("Bad id: " + id);
}
enqueueAction(new Runnable() {
@Override public void run() {
popBackStackState(mHost.getHandler(), null, id, flags);
}
}, false);
}
@Override
public boolean popBackStackImmediate(int id, int flags) {
checkStateLoss();
executePendingTransactions();
if (id < 0) {
throw new IllegalArgumentException("Bad id: " + id);
}
return popBackStackState(mHost.getHandler(), null, id, flags);
}
@Override
public int getBackStackEntryCount() {
return mBackStack != null ? mBackStack.size() : 0;
}
@Override
public BackStackEntry getBackStackEntryAt(int index) {
return mBackStack.get(index);
}
@Override
public void addOnBackStackChangedListener(OnBackStackChangedListener listener) {
if (mBackStackChangeListeners == null) {
mBackStackChangeListeners = new ArrayList<OnBackStackChangedListener>();
}
mBackStackChangeListeners.add(listener);
}
@Override
public void removeOnBackStackChangedListener(OnBackStackChangedListener listener) {
if (mBackStackChangeListeners != null) {
mBackStackChangeListeners.remove(listener);
}
}
@Override
public void putFragment(Bundle bundle, String key, Fragment fragment) {
if (fragment.mIndex < 0) {
throwException(new IllegalStateException("Fragment " + fragment
+ " is not currently in the FragmentManager"));
}
bundle.putInt(key, fragment.mIndex);
}
@Override
public Fragment getFragment(Bundle bundle, String key) {
int index = bundle.getInt(key, -1);
if (index == -1) {
return null;
}
if (index >= mActive.size()) {
throwException(new IllegalStateException("Fragment no longer exists for key "
+ key + ": index " + index));
}
Fragment f = mActive.get(index);
if (f == null) {
throwException(new IllegalStateException("Fragment no longer exists for key "
+ key + ": index " + index));
}
return f;
}
@Override
public List<Fragment> getFragments() {
return mActive;
}
@Override
public Fragment.SavedState saveFragmentInstanceState(Fragment fragment) {
if (fragment.mIndex < 0) {
throwException( new IllegalStateException("Fragment " + fragment
+ " is not currently in the FragmentManager"));
}
if (fragment.mState > Fragment.INITIALIZING) {
Bundle result = saveFragmentBasicState(fragment);
return result != null ? new Fragment.SavedState(result) : null;
}
return null;
}
@Override
public boolean isDestroyed() {
return mDestroyed;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder(128);
sb.append("FragmentManager{");
sb.append(Integer.toHexString(System.identityHashCode(this)));
sb.append(" in ");
if (mParent != null) {
DebugUtils.buildShortClassTag(mParent, sb);
} else {
DebugUtils.buildShortClassTag(mHost, sb);
}
sb.append("}}");
return sb.toString();
}
@Override
public void dump(String prefix, FileDescriptor fd, PrintWriter writer, String[] args) {
String innerPrefix = prefix + " ";
int N;
if (mActive != null) {
N = mActive.size();
if (N > 0) {
writer.print(prefix); writer.print("Active Fragments in ");
writer.print(Integer.toHexString(System.identityHashCode(this)));
writer.println(":");
for (int i=0; i<N; i++) {
Fragment f = mActive.get(i);
writer.print(prefix); writer.print(" #"); writer.print(i);
writer.print(": "); writer.println(f);
if (f != null) {
f.dump(innerPrefix, fd, writer, args);
}
}
}
}
if (mAdded != null) {
N = mAdded.size();
if (N > 0) {
writer.print(prefix); writer.println("Added Fragments:");
for (int i=0; i<N; i++) {
Fragment f = mAdded.get(i);
writer.print(prefix); writer.print(" #"); writer.print(i);
writer.print(": "); writer.println(f.toString());
}
}
}
if (mCreatedMenus != null) {
N = mCreatedMenus.size();
if (N > 0) {
writer.print(prefix); writer.println("Fragments Created Menus:");
for (int i=0; i<N; i++) {
Fragment f = mCreatedMenus.get(i);
writer.print(prefix); writer.print(" #"); writer.print(i);
writer.print(": "); writer.println(f.toString());
}
}
}
if (mBackStack != null) {
N = mBackStack.size();
if (N > 0) {
writer.print(prefix); writer.println("Back Stack:");
for (int i=0; i<N; i++) {
BackStackRecord bs = mBackStack.get(i);
writer.print(prefix); writer.print(" #"); writer.print(i);
writer.print(": "); writer.println(bs.toString());
bs.dump(innerPrefix, fd, writer, args);
}
}
}
synchronized (this) {
if (mBackStackIndices != null) {
N = mBackStackIndices.size();
if (N > 0) {
writer.print(prefix); writer.println("Back Stack Indices:");
for (int i=0; i<N; i++) {
BackStackRecord bs = mBackStackIndices.get(i);
writer.print(prefix); writer.print(" #"); writer.print(i);
writer.print(": "); writer.println(bs);
}
}
}
if (mAvailBackStackIndices != null && mAvailBackStackIndices.size() > 0) {
writer.print(prefix); writer.print("mAvailBackStackIndices: ");
writer.println(Arrays.toString(mAvailBackStackIndices.toArray()));
}
}
if (mPendingActions != null) {
N = mPendingActions.size();
if (N > 0) {
writer.print(prefix); writer.println("Pending Actions:");
for (int i=0; i<N; i++) {
Runnable r = mPendingActions.get(i);
writer.print(prefix); writer.print(" #"); writer.print(i);
writer.print(": "); writer.println(r);
}
}
}
writer.print(prefix); writer.println("FragmentManager misc state:");
writer.print(prefix); writer.print(" mHost="); writer.println(mHost);
writer.print(prefix); writer.print(" mContainer="); writer.println(mContainer);
if (mParent != null) {
writer.print(prefix); writer.print(" mParent="); writer.println(mParent);
}
writer.print(prefix); writer.print(" mCurState="); writer.print(mCurState);
writer.print(" mStateSaved="); writer.print(mStateSaved);
writer.print(" mDestroyed="); writer.println(mDestroyed);
if (mNeedMenuInvalidate) {
writer.print(prefix); writer.print(" mNeedMenuInvalidate=");
writer.println(mNeedMenuInvalidate);
}
if (mNoTransactionsBecause != null) {
writer.print(prefix); writer.print(" mNoTransactionsBecause=");
writer.println(mNoTransactionsBecause);
}
if (mAvailIndices != null && mAvailIndices.size() > 0) {
writer.print(prefix); writer.print(" mAvailIndices: ");
writer.println(Arrays.toString(mAvailIndices.toArray()));
}
}
static final Interpolator DECELERATE_QUINT = new DecelerateInterpolator(2.5f);
static final Interpolator DECELERATE_CUBIC = new DecelerateInterpolator(1.5f);
static final Interpolator ACCELERATE_QUINT = new AccelerateInterpolator(2.5f);
static final Interpolator ACCELERATE_CUBIC = new AccelerateInterpolator(1.5f);
static final int ANIM_DUR = 220;
static Animation makeOpenCloseAnimation(Context context, float startScale,
float endScale, float startAlpha, float endAlpha) {
AnimationSet set = new AnimationSet(false);
ScaleAnimation scale = new ScaleAnimation(startScale, endScale, startScale, endScale,
Animation.RELATIVE_TO_SELF, .5f, Animation.RELATIVE_TO_SELF, .5f);
scale.setInterpolator(DECELERATE_QUINT);
scale.setDuration(ANIM_DUR);
set.addAnimation(scale);
AlphaAnimation alpha = new AlphaAnimation(startAlpha, endAlpha);
alpha.setInterpolator(DECELERATE_CUBIC);
alpha.setDuration(ANIM_DUR);
set.addAnimation(alpha);
return set;
}
static Animation makeFadeAnimation(Context context, float start, float end) {
AlphaAnimation anim = new AlphaAnimation(start, end);
anim.setInterpolator(DECELERATE_CUBIC);
anim.setDuration(ANIM_DUR);
return anim;
}
Animation loadAnimation(Fragment fragment, int transit, boolean enter,
int transitionStyle) {
Animation animObj = fragment.onCreateAnimation(transit, enter,
fragment.mNextAnim);
if (animObj != null) {
return animObj;
}
if (fragment.mNextAnim != 0) {
Animation anim = AnimationUtils.loadAnimation(mHost.getContext(), fragment.mNextAnim);
if (anim != null) {
return anim;
}
}
if (transit == 0) {
return null;
}
int styleIndex = transitToStyleIndex(transit, enter);
if (styleIndex < 0) {
return null;
}
switch (styleIndex) {
case ANIM_STYLE_OPEN_ENTER:
return makeOpenCloseAnimation(mHost.getContext(), 1.125f, 1.0f, 0, 1);
case ANIM_STYLE_OPEN_EXIT:
return makeOpenCloseAnimation(mHost.getContext(), 1.0f, .975f, 1, 0);
case ANIM_STYLE_CLOSE_ENTER:
return makeOpenCloseAnimation(mHost.getContext(), .975f, 1.0f, 0, 1);
case ANIM_STYLE_CLOSE_EXIT:
return makeOpenCloseAnimation(mHost.getContext(), 1.0f, 1.075f, 1, 0);
case ANIM_STYLE_FADE_ENTER:
return makeFadeAnimation(mHost.getContext(), 0, 1);
case ANIM_STYLE_FADE_EXIT:
return makeFadeAnimation(mHost.getContext(), 1, 0);
}
if (transitionStyle == 0 && mHost.onHasWindowAnimations()) {
transitionStyle = mHost.onGetWindowAnimations();
}
if (transitionStyle == 0) {
return null;
}
//TypedArray attrs = mActivity.obtainStyledAttributes(transitionStyle,
// com.android.internal.R.styleable.FragmentAnimation);
//int anim = attrs.getResourceId(styleIndex, 0);
//attrs.recycle();
//if (anim == 0) {
// return null;
//}
//return AnimatorInflater.loadAnimator(mActivity, anim);
return null;
}
public void performPendingDeferredStart(Fragment f) {
if (f.mDeferStart) {
if (mExecutingActions) {
// Wait until we're done executing our pending transactions
mHavePendingDeferredStart = true;
return;
}
f.mDeferStart = false;
moveToState(f, mCurState, 0, 0, false);
}
}
/**
* Sets the view to be animated on a hardware layer during the animation. Note
* that calling this will replace any existing animation listener on the animation
* with a new one, as animations do not support more than one listener. Therefore,
* animations that already have listeners should do the layer change operations
* in their existing listeners, rather than calling this function.
*/
private void setHWLayerAnimListenerIfAlpha(final View v, Animation anim) {
if (v == null || anim == null) {
return;
}
if (shouldRunOnHWLayer(v, anim)) {
AnimationListener originalListener = null;
try {
if (sAnimationListenerField == null) {
sAnimationListenerField = Animation.class.getDeclaredField("mListener");
sAnimationListenerField.setAccessible(true);
}
originalListener = (AnimationListener) sAnimationListenerField.get(anim);
} catch (NoSuchFieldException e) {
Log.e(TAG, "No field with the name mListener is found in Animation class", e);
} catch (IllegalAccessException e) {
Log.e(TAG, "Cannot access Animation's mListener field", e);
}
// If there's already a listener set on the animation, we need to wrap the new listener
// around the existing listener, so that they will both get animation listener
// callbacks.
anim.setAnimationListener(new AnimateOnHWLayerIfNeededListener(v, anim,
originalListener));
}
}
void moveToState(Fragment f, int newState, int transit, int transitionStyle,
boolean keepActive) {
// Fragments that are not currently added will sit in the onCreate() state.
if ((!f.mAdded || f.mDetached) && newState > Fragment.CREATED) {
newState = Fragment.CREATED;
}
if (f.mRemoving && newState > f.mState) {
// While removing a fragment, we can't change it to a higher state.
newState = f.mState;
}
// Defer start if requested; don't allow it to move to STARTED or higher
// if it's not already started.
if (f.mDeferStart && f.mState < Fragment.STARTED && newState > Fragment.STOPPED) {
newState = Fragment.STOPPED;
}
if (f.mState < newState) {
// For fragments that are created from a layout, when restoring from
// state we don't want to allow them to be created until they are
// being reloaded from the layout.
if (f.mFromLayout && !f.mInLayout) {
return;
}
if (f.mAnimatingAway != null) {
// The fragment is currently being animated... but! Now we
// want to move our state back up. Give up on waiting for the
// animation, move to whatever the final state should be once
// the animation is done, and then we can proceed from there.
f.mAnimatingAway = null;
moveToState(f, f.mStateAfterAnimating, 0, 0, true);
}
switch (f.mState) {
case Fragment.INITIALIZING:
if (DEBUG) Log.v(TAG, "moveto CREATED: " + f);
if (f.mSavedFragmentState != null) {
f.mSavedFragmentState.setClassLoader(mHost.getContext().getClassLoader());
f.mSavedViewState = f.mSavedFragmentState.getSparseParcelableArray(
FragmentManagerImpl.VIEW_STATE_TAG);
f.mTarget = getFragment(f.mSavedFragmentState,
FragmentManagerImpl.TARGET_STATE_TAG);
if (f.mTarget != null) {
f.mTargetRequestCode = f.mSavedFragmentState.getInt(
FragmentManagerImpl.TARGET_REQUEST_CODE_STATE_TAG, 0);
}
f.mUserVisibleHint = f.mSavedFragmentState.getBoolean(
FragmentManagerImpl.USER_VISIBLE_HINT_TAG, true);
if (!f.mUserVisibleHint) {
f.mDeferStart = true;
if (newState > Fragment.STOPPED) {
newState = Fragment.STOPPED;
}
}
}
f.mHost = mHost;
f.mParentFragment = mParent;
f.mFragmentManager = mParent != null
? mParent.mChildFragmentManager : mHost.getFragmentManagerImpl();
f.mCalled = false;
f.onAttach(mHost.getContext());
if (!f.mCalled) {
throw new SuperNotCalledException("Fragment " + f
+ " did not call through to super.onAttach()");
}
if (f.mParentFragment == null) {
mHost.onAttachFragment(f);
}
if (!f.mRetaining) {
f.performCreate(f.mSavedFragmentState);
}
f.mRetaining = false;
if (f.mFromLayout) {
// For fragments that are part of the content view
// layout, we need to instantiate the view immediately
// and the inflater will take care of adding it.
f.mView = f.performCreateView(f.getLayoutInflater(
f.mSavedFragmentState), null, f.mSavedFragmentState);
if (f.mView != null) {
f.mInnerView = f.mView;
if (Build.VERSION.SDK_INT >= 11) {
ViewCompat.setSaveFromParentEnabled(f.mView, false);
} else {
f.mView = NoSaveStateFrameLayout.wrap(f.mView);
}
if (f.mHidden) f.mView.setVisibility(View.GONE);
f.onViewCreated(f.mView, f.mSavedFragmentState);
} else {
f.mInnerView = null;
}
}
case Fragment.CREATED:
if (newState > Fragment.CREATED) {
if (DEBUG) Log.v(TAG, "moveto ACTIVITY_CREATED: " + f);
if (!f.mFromLayout) {
ViewGroup container = null;
if (f.mContainerId != 0) {
container = (ViewGroup)mContainer.onFindViewById(f.mContainerId);
if (container == null && !f.mRestored) {
throwException(new IllegalArgumentException(
"No view found for id 0x"
+ Integer.toHexString(f.mContainerId) + " ("
+ f.getResources().getResourceName(f.mContainerId)
+ ") for fragment " + f));
}
}
f.mContainer = container;
f.mView = f.performCreateView(f.getLayoutInflater(
f.mSavedFragmentState), container, f.mSavedFragmentState);
if (f.mView != null) {
f.mInnerView = f.mView;
if (Build.VERSION.SDK_INT >= 11) {
ViewCompat.setSaveFromParentEnabled(f.mView, false);
} else {
f.mView = NoSaveStateFrameLayout.wrap(f.mView);
}
if (container != null) {
Animation anim = loadAnimation(f, transit, true,
transitionStyle);
if (anim != null) {
setHWLayerAnimListenerIfAlpha(f.mView, anim);
f.mView.startAnimation(anim);
}
container.addView(f.mView);
}
if (f.mHidden) f.mView.setVisibility(View.GONE);
f.onViewCreated(f.mView, f.mSavedFragmentState);
} else {
f.mInnerView = null;
}
}
f.performActivityCreated(f.mSavedFragmentState);
if (f.mView != null) {
f.restoreViewState(f.mSavedFragmentState);
}
f.mSavedFragmentState = null;
}
case Fragment.ACTIVITY_CREATED:
case Fragment.STOPPED:
if (newState > Fragment.STOPPED) {
if (DEBUG) Log.v(TAG, "moveto STARTED: " + f);
f.performStart();
}
case Fragment.STARTED:
if (newState > Fragment.STARTED) {
if (DEBUG) Log.v(TAG, "moveto RESUMED: " + f);
f.mResumed = true;
f.performResume();
f.mSavedFragmentState = null;
f.mSavedViewState = null;
}
}
} else if (f.mState > newState) {
switch (f.mState) {
case Fragment.RESUMED:
if (newState < Fragment.RESUMED) {
if (DEBUG) Log.v(TAG, "movefrom RESUMED: " + f);
f.performPause();
f.mResumed = false;
}
case Fragment.STARTED:
if (newState < Fragment.STARTED) {
if (DEBUG) Log.v(TAG, "movefrom STARTED: " + f);
f.performStop();
}
case Fragment.STOPPED:
if (newState < Fragment.STOPPED) {
if (DEBUG) Log.v(TAG, "movefrom STOPPED: " + f);
f.performReallyStop();
}
case Fragment.ACTIVITY_CREATED:
if (newState < Fragment.ACTIVITY_CREATED) {
if (DEBUG) Log.v(TAG, "movefrom ACTIVITY_CREATED: " + f);
if (f.mView != null) {
// Need to save the current view state if not
// done already.
if (mHost.onShouldSaveFragmentState(f) && f.mSavedViewState == null) {
saveFragmentViewState(f);
}
}
f.performDestroyView();
if (f.mView != null && f.mContainer != null) {
Animation anim = null;
if (mCurState > Fragment.INITIALIZING && !mDestroyed) {
anim = loadAnimation(f, transit, false,
transitionStyle);
}
if (anim != null) {
final Fragment fragment = f;
f.mAnimatingAway = f.mView;
f.mStateAfterAnimating = newState;
final View viewToAnimate = f.mView;
anim.setAnimationListener(new AnimateOnHWLayerIfNeededListener(
viewToAnimate, anim) {
@Override
public void onAnimationEnd(Animation animation) {
super.onAnimationEnd(animation);
if (fragment.mAnimatingAway != null) {
fragment.mAnimatingAway = null;
moveToState(fragment, fragment.mStateAfterAnimating,
0, 0, false);
}
}
});
f.mView.startAnimation(anim);
}
f.mContainer.removeView(f.mView);
}
f.mContainer = null;
f.mView = null;
f.mInnerView = null;
}
case Fragment.CREATED:
if (newState < Fragment.CREATED) {
if (mDestroyed) {
if (f.mAnimatingAway != null) {
// The fragment's containing activity is
// being destroyed, but this fragment is
// currently animating away. Stop the
// animation right now -- it is not needed,
// and we can't wait any more on destroying
// the fragment.
View v = f.mAnimatingAway;
f.mAnimatingAway = null;
v.clearAnimation();
}
}
if (f.mAnimatingAway != null) {
// We are waiting for the fragment's view to finish
// animating away. Just make a note of the state
// the fragment now should move to once the animation
// is done.
f.mStateAfterAnimating = newState;
newState = Fragment.CREATED;
} else {
if (DEBUG) Log.v(TAG, "movefrom CREATED: " + f);
if (!f.mRetaining) {
f.performDestroy();
}
f.mCalled = false;
f.onDetach();
if (!f.mCalled) {
throw new SuperNotCalledException("Fragment " + f
+ " did not call through to super.onDetach()");
}
if (!keepActive) {
if (!f.mRetaining) {
makeInactive(f);
} else {
f.mHost = null;
f.mParentFragment = null;
f.mFragmentManager = null;
f.mChildFragmentManager = null;
}
}
}
}
}
}
f.mState = newState;
}
void moveToState(Fragment f) {
moveToState(f, mCurState, 0, 0, false);
}
void moveToState(int newState, boolean always) {
moveToState(newState, 0, 0, always);
}
void moveToState(int newState, int transit, int transitStyle, boolean always) {
if (mHost == null && newState != Fragment.INITIALIZING) {
throw new IllegalStateException("No host");
}
if (!always && mCurState == newState) {
return;
}
mCurState = newState;
if (mActive != null) {
boolean loadersRunning = false;
for (int i=0; i<mActive.size(); i++) {
Fragment f = mActive.get(i);
if (f != null) {
moveToState(f, newState, transit, transitStyle, false);
if (f.mLoaderManager != null) {
loadersRunning |= f.mLoaderManager.hasRunningLoaders();
}
}
}
if (!loadersRunning) {
startPendingDeferredFragments();
}
if (mNeedMenuInvalidate && mHost != null && mCurState == Fragment.RESUMED) {
mHost.onSupportInvalidateOptionsMenu();
mNeedMenuInvalidate = false;
}
}
}
void startPendingDeferredFragments() {
if (mActive == null) return;
for (int i=0; i<mActive.size(); i++) {
Fragment f = mActive.get(i);
if (f != null) {
performPendingDeferredStart(f);
}
}
}
void makeActive(Fragment f) {
if (f.mIndex >= 0) {
return;
}
if (mAvailIndices == null || mAvailIndices.size() <= 0) {
if (mActive == null) {
mActive = new ArrayList<Fragment>();
}
f.setIndex(mActive.size(), mParent);
mActive.add(f);
} else {
f.setIndex(mAvailIndices.remove(mAvailIndices.size()-1), mParent);
mActive.set(f.mIndex, f);
}
if (DEBUG) Log.v(TAG, "Allocated fragment index " + f);
}
void makeInactive(Fragment f) {
if (f.mIndex < 0) {
return;
}
if (DEBUG) Log.v(TAG, "Freeing fragment index " + f);
mActive.set(f.mIndex, null);
if (mAvailIndices == null) {
mAvailIndices = new ArrayList<Integer>();
}
mAvailIndices.add(f.mIndex);
mHost.inactivateFragment(f.mWho);
f.initState();
}
public void addFragment(Fragment fragment, boolean moveToStateNow) {
if (mAdded == null) {
mAdded = new ArrayList<Fragment>();
}
if (DEBUG) Log.v(TAG, "add: " + fragment);
makeActive(fragment);
if (!fragment.mDetached) {
if (mAdded.contains(fragment)) {
throw new IllegalStateException("Fragment already added: " + fragment);
}
mAdded.add(fragment);
fragment.mAdded = true;
fragment.mRemoving = false;
if (fragment.mHasMenu && fragment.mMenuVisible) {
mNeedMenuInvalidate = true;
}
if (moveToStateNow) {
moveToState(fragment);
}
}
}
public void removeFragment(Fragment fragment, int transition, int transitionStyle) {
if (DEBUG) Log.v(TAG, "remove: " + fragment + " nesting=" + fragment.mBackStackNesting);
final boolean inactive = !fragment.isInBackStack();
if (!fragment.mDetached || inactive) {
if (mAdded != null) {
mAdded.remove(fragment);
}
if (fragment.mHasMenu && fragment.mMenuVisible) {
mNeedMenuInvalidate = true;
}
fragment.mAdded = false;
fragment.mRemoving = true;
moveToState(fragment, inactive ? Fragment.INITIALIZING : Fragment.CREATED,
transition, transitionStyle, false);
}
}
public void hideFragment(Fragment fragment, int transition, int transitionStyle) {
if (DEBUG) Log.v(TAG, "hide: " + fragment);
if (!fragment.mHidden) {
fragment.mHidden = true;
if (fragment.mView != null) {
Animation anim = loadAnimation(fragment, transition, false,
transitionStyle);
if (anim != null) {
setHWLayerAnimListenerIfAlpha(fragment.mView, anim);
fragment.mView.startAnimation(anim);
}
fragment.mView.setVisibility(View.GONE);
}
if (fragment.mAdded && fragment.mHasMenu && fragment.mMenuVisible) {
mNeedMenuInvalidate = true;
}
fragment.onHiddenChanged(true);
}
}
public void showFragment(Fragment fragment, int transition, int transitionStyle) {
if (DEBUG) Log.v(TAG, "show: " + fragment);
if (fragment.mHidden) {
fragment.mHidden = false;
if (fragment.mView != null) {
Animation anim = loadAnimation(fragment, transition, true,
transitionStyle);
if (anim != null) {
setHWLayerAnimListenerIfAlpha(fragment.mView, anim);
fragment.mView.startAnimation(anim);
}
fragment.mView.setVisibility(View.VISIBLE);
}
if (fragment.mAdded && fragment.mHasMenu && fragment.mMenuVisible) {
mNeedMenuInvalidate = true;
}
fragment.onHiddenChanged(false);
}
}
public void detachFragment(Fragment fragment, int transition, int transitionStyle) {
if (DEBUG) Log.v(TAG, "detach: " + fragment);
if (!fragment.mDetached) {
fragment.mDetached = true;
if (fragment.mAdded) {
// We are not already in back stack, so need to remove the fragment.
if (mAdded != null) {
if (DEBUG) Log.v(TAG, "remove from detach: " + fragment);
mAdded.remove(fragment);
}
if (fragment.mHasMenu && fragment.mMenuVisible) {
mNeedMenuInvalidate = true;
}
fragment.mAdded = false;
moveToState(fragment, Fragment.CREATED, transition, transitionStyle, false);
}
}
}
public void attachFragment(Fragment fragment, int transition, int transitionStyle) {
if (DEBUG) Log.v(TAG, "attach: " + fragment);
if (fragment.mDetached) {
fragment.mDetached = false;
if (!fragment.mAdded) {
if (mAdded == null) {
mAdded = new ArrayList<Fragment>();
}
if (mAdded.contains(fragment)) {
throw new IllegalStateException("Fragment already added: " + fragment);
}
if (DEBUG) Log.v(TAG, "add from attach: " + fragment);
mAdded.add(fragment);
fragment.mAdded = true;
if (fragment.mHasMenu && fragment.mMenuVisible) {
mNeedMenuInvalidate = true;
}
moveToState(fragment, mCurState, transition, transitionStyle, false);
}
}
}
public Fragment findFragmentById(int id) {
if (mAdded != null) {
// First look through added fragments.
for (int i=mAdded.size()-1; i>=0; i--) {
Fragment f = mAdded.get(i);
if (f != null && f.mFragmentId == id) {
return f;
}
}
}
if (mActive != null) {
// Now for any known fragment.
for (int i=mActive.size()-1; i>=0; i--) {
Fragment f = mActive.get(i);
if (f != null && f.mFragmentId == id) {
return f;
}
}
}
return null;
}
public Fragment findFragmentByTag(String tag) {
if (mAdded != null && tag != null) {
// First look through added fragments.
for (int i=mAdded.size()-1; i>=0; i--) {
Fragment f = mAdded.get(i);
if (f != null && tag.equals(f.mTag)) {
return f;
}
}
}
if (mActive != null && tag != null) {
// Now for any known fragment.
for (int i=mActive.size()-1; i>=0; i--) {
Fragment f = mActive.get(i);
if (f != null && tag.equals(f.mTag)) {
return f;
}
}
}
return null;
}
public Fragment findFragmentByWho(String who) {
if (mActive != null && who != null) {
for (int i=mActive.size()-1; i>=0; i--) {
Fragment f = mActive.get(i);
if (f != null && (f=f.findFragmentByWho(who)) != null) {
return f;
}
}
}
return null;
}
private void checkStateLoss() {
if (mStateSaved) {
throw new IllegalStateException(
"Can not perform this action after onSaveInstanceState");
}
if (mNoTransactionsBecause != null) {
throw new IllegalStateException(
"Can not perform this action inside of " + mNoTransactionsBecause);
}
}
/**
* Adds an action to the queue of pending actions.
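* <p>Illustrative only -- this mirrors how {@link #popBackStack()} uses it:
* <pre>{@code
* enqueueAction(new Runnable() {
*     public void run() {
*         popBackStackState(mHost.getHandler(), null, -1, 0);
*     }
* }, false);
* }</pre>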
*
* @param action the action to add
* @param allowStateLoss whether to allow loss of state information
* @throws IllegalStateException if the activity has been destroyed
*/
public void enqueueAction(Runnable action, boolean allowStateLoss) {
if (!allowStateLoss) {
checkStateLoss();
}
synchronized (this) {
if (mDestroyed || mHost == null) {
throw new IllegalStateException("Activity has been destroyed");
}
if (mPendingActions == null) {
mPendingActions = new ArrayList<Runnable>();
}
mPendingActions.add(action);
if (mPendingActions.size() == 1) {
mHost.getHandler().removeCallbacks(mExecCommit);
mHost.getHandler().post(mExecCommit);
}
}
}
public int allocBackStackIndex(BackStackRecord bse) {
synchronized (this) {
if (mAvailBackStackIndices == null || mAvailBackStackIndices.size() <= 0) {
if (mBackStackIndices == null) {
mBackStackIndices = new ArrayList<BackStackRecord>();
}
int index = mBackStackIndices.size();
if (DEBUG) Log.v(TAG, "Setting back stack index " + index + " to " + bse);
mBackStackIndices.add(bse);
return index;
} else {
int index = mAvailBackStackIndices.remove(mAvailBackStackIndices.size()-1);
if (DEBUG) Log.v(TAG, "Adding back stack index " + index + " with " + bse);
mBackStackIndices.set(index, bse);
return index;
}
}
}
public void setBackStackIndex(int index, BackStackRecord bse) {
synchronized (this) {
if (mBackStackIndices == null) {
mBackStackIndices = new ArrayList<BackStackRecord>();
}
int N = mBackStackIndices.size();
if (index < N) {
if (DEBUG) Log.v(TAG, "Setting back stack index " + index + " to " + bse);
mBackStackIndices.set(index, bse);
} else {
while (N < index) {
mBackStackIndices.add(null);
if (mAvailBackStackIndices == null) {
mAvailBackStackIndices = new ArrayList<Integer>();
}
if (DEBUG) Log.v(TAG, "Adding available back stack index " + N);
mAvailBackStackIndices.add(N);
N++;
}
if (DEBUG) Log.v(TAG, "Adding back stack index " + index + " with " + bse);
mBackStackIndices.add(bse);
}
}
}
public void freeBackStackIndex(int index) {
synchronized (this) {
mBackStackIndices.set(index, null);
if (mAvailBackStackIndices == null) {
mAvailBackStackIndices = new ArrayList<Integer>();
}
if (DEBUG) Log.v(TAG, "Freeing back stack index " + index);
mAvailBackStackIndices.add(index);
}
}
/**
* Only call from main thread!
*/
public boolean execPendingActions() {
if (mExecutingActions) {
throw new IllegalStateException("Recursive entry to executePendingTransactions");
}
if (Looper.myLooper() != mHost.getHandler().getLooper()) {
throw new IllegalStateException("Must be called from main thread of process");
}
boolean didSomething = false;
while (true) {
int numActions;
synchronized (this) {
if (mPendingActions == null || mPendingActions.size() == 0) {
break;
}
numActions = mPendingActions.size();
if (mTmpActions == null || mTmpActions.length < numActions) {
mTmpActions = new Runnable[numActions];
}
mPendingActions.toArray(mTmpActions);
mPendingActions.clear();
mHost.getHandler().removeCallbacks(mExecCommit);
}
mExecutingActions = true;
for (int i=0; i<numActions; i++) {
mTmpActions[i].run();
mTmpActions[i] = null;
}
mExecutingActions = false;
didSomething = true;
}
if (mHavePendingDeferredStart) {
boolean loadersRunning = false;
for (int i=0; i<mActive.size(); i++) {
Fragment f = mActive.get(i);
if (f != null && f.mLoaderManager != null) {
loadersRunning |= f.mLoaderManager.hasRunningLoaders();
}
}
if (!loadersRunning) {
mHavePendingDeferredStart = false;
startPendingDeferredFragments();
}
}
return didSomething;
}
void reportBackStackChanged() {
if (mBackStackChangeListeners != null) {
for (int i=0; i<mBackStackChangeListeners.size(); i++) {
mBackStackChangeListeners.get(i).onBackStackChanged();
}
}
}
void addBackStackState(BackStackRecord state) {
if (mBackStack == null) {
mBackStack = new ArrayList<BackStackRecord>();
}
mBackStack.add(state);
reportBackStackChanged();
}
@SuppressWarnings("unused")
boolean popBackStackState(Handler handler, String name, int id, int flags) {
if (mBackStack == null) {
return false;
}
if (name == null && id < 0 && (flags&POP_BACK_STACK_INCLUSIVE) == 0) {
int last = mBackStack.size()-1;
if (last < 0) {
return false;
}
final BackStackRecord bss = mBackStack.remove(last);
SparseArray<Fragment> firstOutFragments = new SparseArray<Fragment>();
SparseArray<Fragment> lastInFragments = new SparseArray<Fragment>();
bss.calculateBackFragments(firstOutFragments, lastInFragments);
bss.popFromBackStack(true, null, firstOutFragments, lastInFragments);
reportBackStackChanged();
} else {
int index = -1;
if (name != null || id >= 0) {
// If a name or ID is specified, look for that place in
// the stack.
index = mBackStack.size()-1;
while (index >= 0) {
BackStackRecord bss = mBackStack.get(index);
if (name != null && name.equals(bss.getName())) {
break;
}
if (id >= 0 && id == bss.mIndex) {
break;
}
index--;
}
if (index < 0) {
return false;
}
if ((flags&POP_BACK_STACK_INCLUSIVE) != 0) {
index--;
// Consume all following entries that match.
while (index >= 0) {
BackStackRecord bss = mBackStack.get(index);
if ((name != null && name.equals(bss.getName()))
|| (id >= 0 && id == bss.mIndex)) {
index--;
continue;
}
break;
}
}
}
if (index == mBackStack.size()-1) {
return false;
}
final ArrayList<BackStackRecord> states
= new ArrayList<BackStackRecord>();
for (int i=mBackStack.size()-1; i>index; i--) {
states.add(mBackStack.remove(i));
}
final int LAST = states.size()-1;
SparseArray<Fragment> firstOutFragments = new SparseArray<Fragment>();
SparseArray<Fragment> lastInFragments = new SparseArray<Fragment>();
for (int i=0; i<=LAST; i++) {
states.get(i).calculateBackFragments(firstOutFragments, lastInFragments);
}
BackStackRecord.TransitionState state = null;
for (int i=0; i<=LAST; i++) {
if (DEBUG) Log.v(TAG, "Popping back stack state: " + states.get(i));
state = states.get(i).popFromBackStack(i == LAST, state,
firstOutFragments, lastInFragments);
}
reportBackStackChanged();
}
return true;
}
ArrayList<Fragment> retainNonConfig() {
ArrayList<Fragment> fragments = null;
if (mActive != null) {
for (int i=0; i<mActive.size(); i++) {
Fragment f = mActive.get(i);
if (f != null && f.mRetainInstance) {
if (fragments == null) {
fragments = new ArrayList<Fragment>();
}
fragments.add(f);
f.mRetaining = true;
f.mTargetIndex = f.mTarget != null ? f.mTarget.mIndex : -1;
if (DEBUG) Log.v(TAG, "retainNonConfig: keeping retained " + f);
}
}
}
return fragments;
}
void saveFragmentViewState(Fragment f) {
if (f.mInnerView == null) {
return;
}
if (mStateArray == null) {
mStateArray = new SparseArray<Parcelable>();
} else {
mStateArray.clear();
}
f.mInnerView.saveHierarchyState(mStateArray);
if (mStateArray.size() > 0) {
f.mSavedViewState = mStateArray;
mStateArray = null;
}
}
Bundle saveFragmentBasicState(Fragment f) {
Bundle result = null;
if (mStateBundle == null) {
mStateBundle = new Bundle();
}
f.performSaveInstanceState(mStateBundle);
if (!mStateBundle.isEmpty()) {
result = mStateBundle;
mStateBundle = null;
}
if (f.mView != null) {
saveFragmentViewState(f);
}
if (f.mSavedViewState != null) {
if (result == null) {
result = new Bundle();
}
result.putSparseParcelableArray(
FragmentManagerImpl.VIEW_STATE_TAG, f.mSavedViewState);
}
if (!f.mUserVisibleHint) {
if (result == null) {
result = new Bundle();
}
// Only add this if it's not the default value
result.putBoolean(FragmentManagerImpl.USER_VISIBLE_HINT_TAG, f.mUserVisibleHint);
}
return result;
}
Parcelable saveAllState() {
// Make sure all pending operations have now been executed to get
// our state up-to-date.
execPendingActions();
if (HONEYCOMB) {
// As of Honeycomb, we save state after pausing. Prior to that
// it is before pausing. With fragments this is an issue, since
// there are many things you may do after pausing but before
// stopping that change the fragment state. For those older
// devices, we will not at this point say that we have saved
// the state, so we will allow them to continue doing fragment
// transactions. This retains the same semantics as Honeycomb,
// though you do have the risk of losing the very most recent state
// if the process is killed... we'll live with that.
mStateSaved = true;
}
if (mActive == null || mActive.size() <= 0) {
return null;
}
// First collect all active fragments.
int N = mActive.size();
FragmentState[] active = new FragmentState[N];
boolean haveFragments = false;
for (int i=0; i<N; i++) {
Fragment f = mActive.get(i);
if (f != null) {
if (f.mIndex < 0) {
throwException(new IllegalStateException(
"Failure saving state: active " + f
+ " has cleared index: " + f.mIndex));
}
haveFragments = true;
FragmentState fs = new FragmentState(f);
active[i] = fs;
if (f.mState > Fragment.INITIALIZING && fs.mSavedFragmentState == null) {
fs.mSavedFragmentState = saveFragmentBasicState(f);
if (f.mTarget != null) {
if (f.mTarget.mIndex < 0) {
throwException(new IllegalStateException(
"Failure saving state: " + f
+ " has target not in fragment manager: " + f.mTarget));
}
if (fs.mSavedFragmentState == null) {
fs.mSavedFragmentState = new Bundle();
}
putFragment(fs.mSavedFragmentState,
FragmentManagerImpl.TARGET_STATE_TAG, f.mTarget);
if (f.mTargetRequestCode != 0) {
fs.mSavedFragmentState.putInt(
FragmentManagerImpl.TARGET_REQUEST_CODE_STATE_TAG,
f.mTargetRequestCode);
}
}
} else {
fs.mSavedFragmentState = f.mSavedFragmentState;
}
if (DEBUG) Log.v(TAG, "Saved state of " + f + ": "
+ fs.mSavedFragmentState);
}
}
if (!haveFragments) {
if (DEBUG) Log.v(TAG, "saveAllState: no fragments!");
return null;
}
int[] added = null;
BackStackState[] backStack = null;
// Build list of currently added fragments.
if (mAdded != null) {
N = mAdded.size();
if (N > 0) {
added = new int[N];
for (int i=0; i<N; i++) {
added[i] = mAdded.get(i).mIndex;
if (added[i] < 0) {
throwException(new IllegalStateException(
"Failure saving state: active " + mAdded.get(i)
+ " has cleared index: " + added[i]));
}
if (DEBUG) Log.v(TAG, "saveAllState: adding fragment #" + i
+ ": " + mAdded.get(i));
}
}
}
// Now save back stack.
if (mBackStack != null) {
N = mBackStack.size();
if (N > 0) {
backStack = new BackStackState[N];
for (int i=0; i<N; i++) {
backStack[i] = new BackStackState(mBackStack.get(i));
if (DEBUG) Log.v(TAG, "saveAllState: adding back stack #" + i
+ ": " + mBackStack.get(i));
}
}
}
FragmentManagerState fms = new FragmentManagerState();
fms.mActive = active;
fms.mAdded = added;
fms.mBackStack = backStack;
return fms;
}
void restoreAllState(Parcelable state, List<Fragment> nonConfig) {
// If there is no saved state at all, then there can not be
// any nonConfig fragments either, so that is that.
if (state == null) return;
FragmentManagerState fms = (FragmentManagerState)state;
if (fms.mActive == null) return;
// First re-attach any non-config instances we are retaining back
// to their saved state, so we don't try to instantiate them again.
if (nonConfig != null) {
for (int i=0; i<nonConfig.size(); i++) {
Fragment f = nonConfig.get(i);
if (DEBUG) Log.v(TAG, "restoreAllState: re-attaching retained " + f);
FragmentState fs = fms.mActive[f.mIndex];
fs.mInstance = f;
f.mSavedViewState = null;
f.mBackStackNesting = 0;
f.mInLayout = false;
f.mAdded = false;
f.mTarget = null;
if (fs.mSavedFragmentState != null) {
fs.mSavedFragmentState.setClassLoader(mHost.getContext().getClassLoader());
f.mSavedViewState = fs.mSavedFragmentState.getSparseParcelableArray(
FragmentManagerImpl.VIEW_STATE_TAG);
f.mSavedFragmentState = fs.mSavedFragmentState;
}
}
}
// Build the full list of active fragments, instantiating them from
// their saved state.
mActive = new ArrayList<Fragment>(fms.mActive.length);
if (mAvailIndices != null) {
mAvailIndices.clear();
}
for (int i=0; i<fms.mActive.length; i++) {
FragmentState fs = fms.mActive[i];
if (fs != null) {
Fragment f = fs.instantiate(mHost, mParent);
if (DEBUG) Log.v(TAG, "restoreAllState: active #" + i + ": " + f);
mActive.add(f);
// Now that the fragment is instantiated (or came from being
// retained above), clear mInstance in case we end up re-restoring
// from this FragmentState again.
fs.mInstance = null;
} else {
mActive.add(null);
if (mAvailIndices == null) {
mAvailIndices = new ArrayList<Integer>();
}
if (DEBUG) Log.v(TAG, "restoreAllState: avail #" + i);
mAvailIndices.add(i);
}
}
// Update the target of all retained fragments.
if (nonConfig != null) {
for (int i=0; i<nonConfig.size(); i++) {
Fragment f = nonConfig.get(i);
if (f.mTargetIndex >= 0) {
if (f.mTargetIndex < mActive.size()) {
f.mTarget = mActive.get(f.mTargetIndex);
} else {
Log.w(TAG, "Re-attaching retained fragment " + f
+ " target no longer exists: " + f.mTargetIndex);
f.mTarget = null;
}
}
}
}
// Build the list of currently added fragments.
if (fms.mAdded != null) {
mAdded = new ArrayList<Fragment>(fms.mAdded.length);
for (int i=0; i<fms.mAdded.length; i++) {
Fragment f = mActive.get(fms.mAdded[i]);
if (f == null) {
throwException(new IllegalStateException(
"No instantiated fragment for index #" + fms.mAdded[i]));
}
f.mAdded = true;
if (DEBUG) Log.v(TAG, "restoreAllState: added #" + i + ": " + f);
if (mAdded.contains(f)) {
throw new IllegalStateException("Already added!");
}
mAdded.add(f);
}
} else {
mAdded = null;
}
// Build the back stack.
if (fms.mBackStack != null) {
mBackStack = new ArrayList<BackStackRecord>(fms.mBackStack.length);
for (int i=0; i<fms.mBackStack.length; i++) {
BackStackRecord bse = fms.mBackStack[i].instantiate(this);
if (DEBUG) {
Log.v(TAG, "restoreAllState: back stack #" + i
+ " (index " + bse.mIndex + "): " + bse);
LogWriter logw = new LogWriter(TAG);
PrintWriter pw = new PrintWriter(logw);
bse.dump(" ", pw, false);
}
mBackStack.add(bse);
if (bse.mIndex >= 0) {
setBackStackIndex(bse.mIndex, bse);
}
}
} else {
mBackStack = null;
}
}
public void attachController(FragmentHostCallback host,
FragmentContainer container, Fragment parent) {
if (mHost != null) throw new IllegalStateException("Already attached");
mHost = host;
mContainer = container;
mParent = parent;
}
public void noteStateNotSaved() {
mStateSaved = false;
}
public void dispatchCreate() {
mStateSaved = false;
moveToState(Fragment.CREATED, false);
}
public void dispatchActivityCreated() {
mStateSaved = false;
moveToState(Fragment.ACTIVITY_CREATED, false);
}
public void dispatchStart() {
mStateSaved = false;
moveToState(Fragment.STARTED, false);
}
public void dispatchResume() {
mStateSaved = false;
moveToState(Fragment.RESUMED, false);
}
public void dispatchPause() {
moveToState(Fragment.STARTED, false);
}
public void dispatchStop() {
// See saveAllState() for the explanation of this. We do this for
// all platform versions, to keep our behavior more consistent between
// them.
mStateSaved = true;
moveToState(Fragment.STOPPED, false);
}
public void dispatchReallyStop() {
moveToState(Fragment.ACTIVITY_CREATED, false);
}
public void dispatchDestroyView() {
moveToState(Fragment.CREATED, false);
}
public void dispatchDestroy() {
mDestroyed = true;
execPendingActions();
moveToState(Fragment.INITIALIZING, false);
mHost = null;
mContainer = null;
mParent = null;
}
public void dispatchConfigurationChanged(Configuration newConfig) {
if (mAdded != null) {
for (int i=0; i<mAdded.size(); i++) {
Fragment f = mAdded.get(i);
if (f != null) {
f.performConfigurationChanged(newConfig);
}
}
}
}
public void dispatchLowMemory() {
if (mAdded != null) {
for (int i=0; i<mAdded.size(); i++) {
Fragment f = mAdded.get(i);
if (f != null) {
f.performLowMemory();
}
}
}
}
public boolean dispatchCreateOptionsMenu(Menu menu, MenuInflater inflater) {
boolean show = false;
ArrayList<Fragment> newMenus = null;
if (mAdded != null) {
for (int i=0; i<mAdded.size(); i++) {
Fragment f = mAdded.get(i);
if (f != null) {
if (f.performCreateOptionsMenu(menu, inflater)) {
show = true;
if (newMenus == null) {
newMenus = new ArrayList<Fragment>();
}
newMenus.add(f);
}
}
}
}
if (mCreatedMenus != null) {
for (int i=0; i<mCreatedMenus.size(); i++) {
Fragment f = mCreatedMenus.get(i);
if (newMenus == null || !newMenus.contains(f)) {
f.onDestroyOptionsMenu();
}
}
}
mCreatedMenus = newMenus;
return show;
}
public boolean dispatchPrepareOptionsMenu(Menu menu) {
boolean show = false;
if (mAdded != null) {
for (int i=0; i<mAdded.size(); i++) {
Fragment f = mAdded.get(i);
if (f != null) {
if (f.performPrepareOptionsMenu(menu)) {
show = true;
}
}
}
}
return show;
}
public boolean dispatchOptionsItemSelected(MenuItem item) {
if (mAdded != null) {
for (int i=0; i<mAdded.size(); i++) {
Fragment f = mAdded.get(i);
if (f != null) {
if (f.performOptionsItemSelected(item)) {
return true;
}
}
}
}
return false;
}
public boolean dispatchContextItemSelected(MenuItem item) {
if (mAdded != null) {
for (int i=0; i<mAdded.size(); i++) {
Fragment f = mAdded.get(i);
if (f != null) {
if (f.performContextItemSelected(item)) {
return true;
}
}
}
}
return false;
}
public void dispatchOptionsMenuClosed(Menu menu) {
if (mAdded != null) {
for (int i=0; i<mAdded.size(); i++) {
Fragment f = mAdded.get(i);
if (f != null) {
f.performOptionsMenuClosed(menu);
}
}
}
}
public static int reverseTransit(int transit) {
int rev = 0;
switch (transit) {
case FragmentTransaction.TRANSIT_FRAGMENT_OPEN:
rev = FragmentTransaction.TRANSIT_FRAGMENT_CLOSE;
break;
case FragmentTransaction.TRANSIT_FRAGMENT_CLOSE:
rev = FragmentTransaction.TRANSIT_FRAGMENT_OPEN;
break;
case FragmentTransaction.TRANSIT_FRAGMENT_FADE:
rev = FragmentTransaction.TRANSIT_FRAGMENT_FADE;
break;
}
return rev;
}
public static final int ANIM_STYLE_OPEN_ENTER = 1;
public static final int ANIM_STYLE_OPEN_EXIT = 2;
public static final int ANIM_STYLE_CLOSE_ENTER = 3;
public static final int ANIM_STYLE_CLOSE_EXIT = 4;
public static final int ANIM_STYLE_FADE_ENTER = 5;
public static final int ANIM_STYLE_FADE_EXIT = 6;
public static int transitToStyleIndex(int transit, boolean enter) {
int animAttr = -1;
switch (transit) {
case FragmentTransaction.TRANSIT_FRAGMENT_OPEN:
animAttr = enter ? ANIM_STYLE_OPEN_ENTER : ANIM_STYLE_OPEN_EXIT;
break;
case FragmentTransaction.TRANSIT_FRAGMENT_CLOSE:
animAttr = enter ? ANIM_STYLE_CLOSE_ENTER : ANIM_STYLE_CLOSE_EXIT;
break;
case FragmentTransaction.TRANSIT_FRAGMENT_FADE:
animAttr = enter ? ANIM_STYLE_FADE_ENTER : ANIM_STYLE_FADE_EXIT;
break;
}
return animAttr;
}
@Override
public View onCreateView(View parent, String name, Context context, AttributeSet attrs) {
if (!"fragment".equals(name)) {
return null;
}
String fname = attrs.getAttributeValue(null, "class");
TypedArray a = context.obtainStyledAttributes(attrs, FragmentTag.Fragment);
if (fname == null) {
fname = a.getString(FragmentTag.Fragment_name);
}
int id = a.getResourceId(FragmentTag.Fragment_id, View.NO_ID);
String tag = a.getString(FragmentTag.Fragment_tag);
a.recycle();
if (!Fragment.isSupportFragmentClass(mHost.getContext(), fname)) {
// Invalid support lib fragment; let the device's framework handle it.
// This will allow android.app.Fragments to do the right thing.
return null;
}
int containerId = parent != null ? parent.getId() : 0;
if (containerId == View.NO_ID && id == View.NO_ID && tag == null) {
throw new IllegalArgumentException(attrs.getPositionDescription()
+ ": Must specify unique android:id, android:tag, or have a parent with an id for " + fname);
}
// If we restored from a previous state, we may already have
// instantiated this fragment from the state and should use
// that instance instead of making a new one.
Fragment fragment = id != View.NO_ID ? findFragmentById(id) : null;
if (fragment == null && tag != null) {
fragment = findFragmentByTag(tag);
}
if (fragment == null && containerId != View.NO_ID) {
fragment = findFragmentById(containerId);
}
if (FragmentManagerImpl.DEBUG) Log.v(TAG, "onCreateView: id=0x"
+ Integer.toHexString(id) + " fname=" + fname
+ " existing=" + fragment);
if (fragment == null) {
fragment = Fragment.instantiate(context, fname);
fragment.mFromLayout = true;
fragment.mFragmentId = id != 0 ? id : containerId;
fragment.mContainerId = containerId;
fragment.mTag = tag;
fragment.mInLayout = true;
fragment.mFragmentManager = this;
fragment.onInflate(mHost.getContext(), attrs, fragment.mSavedFragmentState);
addFragment(fragment, true);
} else if (fragment.mInLayout) {
// A fragment already exists and it is not one we restored from
// previous state.
throw new IllegalArgumentException(attrs.getPositionDescription()
+ ": Duplicate id 0x" + Integer.toHexString(id)
+ ", tag " + tag + ", or parent id 0x" + Integer.toHexString(containerId)
+ " with another fragment for " + fname);
} else {
// This fragment was retained from a previous instance; get it
// going now.
fragment.mInLayout = true;
// If this fragment is newly instantiated (either right now, or
// from last saved state), then give it the attributes to
// initialize itself.
if (!fragment.mRetaining) {
fragment.onInflate(mHost.getContext(), attrs, fragment.mSavedFragmentState);
}
}
// If we haven't finished entering the CREATED state ourselves yet,
// push the inflated child fragment along.
if (mCurState < Fragment.CREATED && fragment.mFromLayout) {
moveToState(fragment, Fragment.CREATED, 0, 0, false);
} else {
moveToState(fragment);
}
if (fragment.mView == null) {
throw new IllegalStateException("Fragment " + fname
+ " did not create a view.");
}
if (id != 0) {
fragment.mView.setId(id);
}
if (fragment.mView.getTag() == null) {
fragment.mView.setTag(tag);
}
return fragment.mView;
}
LayoutInflaterFactory getLayoutInflaterFactory() {
return this;
}
static class FragmentTag {
public static final int[] Fragment = {
0x01010003, 0x010100d0, 0x010100d1
};
public static final int Fragment_id = 1;
public static final int Fragment_name = 0;
public static final int Fragment_tag = 2;
}
}
| am db1c6057: am fa42920c: am 7fe53f12: am e5579d82: am 1b84066e: Fix support Fragment attribute reading
* commit 'db1c6057ee97b2ba1354e13397c4d8dff14f617d':
Fix support Fragment attribute reading
| v4/java/android/support/v4/app/FragmentManager.java | am db1c6057: am fa42920c: am 7fe53f12: am e5579d82: am 1b84066e: Fix support Fragment attribute reading | <ide><path>4/java/android/support/v4/app/FragmentManager.java
<ide> fragment.mTag = tag;
<ide> fragment.mInLayout = true;
<ide> fragment.mFragmentManager = this;
<add> fragment.mHost = mHost;
<ide> fragment.onInflate(mHost.getContext(), attrs, fragment.mSavedFragmentState);
<ide> addFragment(fragment, true);
<ide> |
|
Java | mit | error: pathspec 'L2Tests/test/com/microsoft/alm/L2/tfvc/TfvcRootCheckerTest.java' did not match any file(s) known to git
| e732435f1e8351ce0c5f187edaae10f1092a33f3 | 1 | Microsoft/vso-intellij,Microsoft/vso-intellij | // Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See License.txt in the project root.
package com.microsoft.alm.L2.tfvc;
import com.intellij.openapi.vcs.VcsRootChecker;
import com.microsoft.alm.plugin.idea.tfvc.extensions.TfvcRootChecker;
import com.microsoft.alm.plugin.services.PropertyService;
import org.junit.Test;
import java.io.IOException;
public class TfvcRootCheckerTest extends TfvcCheckoutTestBase {
private String savedUseReactiveClientValue;
@Override
protected void setUp() throws Exception {
super.setUp();
savedUseReactiveClientValue = PropertyService.getInstance()
.getProperty(PropertyService.PROP_TFVC_USE_REACTIVE_CLIENT);
}
@Override
protected void tearDown() throws Exception {
PropertyService.getInstance()
.setProperty(PropertyService.PROP_TFVC_USE_REACTIVE_CLIENT, savedUseReactiveClientValue);
super.tearDown();
}
private void doRootCheckerTest() throws IOException, InterruptedException {
checkoutTestRepository(workspace -> {
TfvcRootChecker rootChecker = VcsRootChecker.EXTENSION_POINT_NAME.findExtension(TfvcRootChecker.class);
assertNotNull(rootChecker);
assertTrue(rootChecker.isRoot(workspace.toString()));
});
}
@Test(timeout = 60000)
public void testRootChecker() throws InterruptedException, IOException {
PropertyService.getInstance().setProperty(PropertyService.PROP_TFVC_USE_REACTIVE_CLIENT, "false");
doRootCheckerTest();
PropertyService.getInstance().setProperty(PropertyService.PROP_TFVC_USE_REACTIVE_CLIENT, "true");
doRootCheckerTest();
}
}
| L2Tests/test/com/microsoft/alm/L2/tfvc/TfvcRootCheckerTest.java | TfvcRootChecker: add integration tests for both classic and reactive clients
| L2Tests/test/com/microsoft/alm/L2/tfvc/TfvcRootCheckerTest.java | TfvcRootChecker: add integration tests for both classic and reactive clients | <ide><path>2Tests/test/com/microsoft/alm/L2/tfvc/TfvcRootCheckerTest.java
<add>// Copyright (c) Microsoft. All rights reserved.
<add>// Licensed under the MIT license. See License.txt in the project root.
<add>
<add>package com.microsoft.alm.L2.tfvc;
<add>
<add>import com.intellij.openapi.vcs.VcsRootChecker;
<add>import com.microsoft.alm.plugin.idea.tfvc.extensions.TfvcRootChecker;
<add>import com.microsoft.alm.plugin.services.PropertyService;
<add>import org.junit.Test;
<add>
<add>import java.io.IOException;
<add>
<add>public class TfvcRootCheckerTest extends TfvcCheckoutTestBase {
<add>
<add> private String savedUseReactiveClientValue;
<add>
<add> @Override
<add> protected void setUp() throws Exception {
<add> super.setUp();
<add> savedUseReactiveClientValue = PropertyService.getInstance()
<add> .getProperty(PropertyService.PROP_TFVC_USE_REACTIVE_CLIENT);
<add> }
<add>
<add> @Override
<add> protected void tearDown() throws Exception {
<add> PropertyService.getInstance()
<add> .setProperty(PropertyService.PROP_TFVC_USE_REACTIVE_CLIENT, savedUseReactiveClientValue);
<add> super.tearDown();
<add> }
<add>
<add> private void doRootCheckerTest() throws IOException, InterruptedException {
<add> checkoutTestRepository(workspace -> {
<add> TfvcRootChecker rootChecker = VcsRootChecker.EXTENSION_POINT_NAME.findExtension(TfvcRootChecker.class);
<add> assertNotNull(rootChecker);
<add> assertTrue(rootChecker.isRoot(workspace.toString()));
<add> });
<add> }
<add>
<add> @Test(timeout = 60000)
<add> public void testRootChecker() throws InterruptedException, IOException {
<add> PropertyService.getInstance().setProperty(PropertyService.PROP_TFVC_USE_REACTIVE_CLIENT, "false");
<add> doRootCheckerTest();
<add> PropertyService.getInstance().setProperty(PropertyService.PROP_TFVC_USE_REACTIVE_CLIENT, "true");
<add> doRootCheckerTest();
<add> }
<add>} |
|
Java | mit | 0e77ab6a11390f0eb521d28814d00d87f4759fe8 | 0 | feroult/yawp,feroult/yawp,feroult/yawp,feroult/yawp,feroult/yawp,feroult/yawp | package io.yawp.servlet.hierarchy;
import io.yawp.commons.utils.ServletTestCase;
import io.yawp.repository.models.basic.HookedObject;
import io.yawp.repository.models.hierarchy.AnotherObjectSubClass;
import io.yawp.repository.models.hierarchy.ObjectSubClass;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class HierarchyHookTest extends ServletTestCase {
@Test
public void testAllObjectsHook() {
String json = post("/hooked_objects", "{ \"stringValue\": \"all_objects\" }");
HookedObject object = from(json, HookedObject.class);
assertEquals("xpto all objects", object.getStringValue());
}
@Test
public void testSuperClassHook() {
String json = post("/hierarchy_subclasses/1", "{ \"name\": \"john\" }");
ObjectSubClass object = from(json, ObjectSubClass.class);
assertEquals("john + superclass hook", object.getName());
}
@Test
public void testMoreSpecificSubClassHook() {
String json = post("/hierarchy_another-subclasses/1", "{ \"name\": \"john\" }");
AnotherObjectSubClass object = from(json, AnotherObjectSubClass.class);
assertEquals("john + more specific hook", object.getName());
}
}
| yawp-core/src/test/java/io/yawp/servlet/hierarchy/HierarchyHookTest.java | package io.yawp.servlet.hierarchy;
import io.yawp.commons.utils.ServletTestCase;
import io.yawp.repository.models.basic.HookedObject;
import io.yawp.repository.models.hierarchy.AnotherObjectSubClass;
import io.yawp.repository.models.hierarchy.ObjectSubClass;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class HierarchyHookTest extends ServletTestCase {
@Test
public void testAllObjectsHook() {
String json = post("/hooked_objects", "{ stringValue: 'all_objects' }");
HookedObject object = from(json, HookedObject.class);
assertEquals("xpto all objects", object.getStringValue());
}
@Test
public void testSuperClassHook() {
String json = post("/hierarchy_subclasses/1", "{ name: 'john' }");
ObjectSubClass object = from(json, ObjectSubClass.class);
assertEquals("john + superclass hook", object.getName());
}
@Test
public void testMoreSpecificSubClassHook() {
String json = post("/hierarchy_another-subclasses/1", "{ name: 'john' }");
AnotherObjectSubClass object = from(json, AnotherObjectSubClass.class);
assertEquals("john + more specific hook", object.getName());
}
}
| genson
| yawp-core/src/test/java/io/yawp/servlet/hierarchy/HierarchyHookTest.java | genson | <ide><path>awp-core/src/test/java/io/yawp/servlet/hierarchy/HierarchyHookTest.java
<ide> import io.yawp.repository.models.basic.HookedObject;
<ide> import io.yawp.repository.models.hierarchy.AnotherObjectSubClass;
<ide> import io.yawp.repository.models.hierarchy.ObjectSubClass;
<del>import org.junit.Ignore;
<ide> import org.junit.Test;
<ide>
<ide> import static org.junit.Assert.assertEquals;
<ide>
<ide> @Test
<ide> public void testAllObjectsHook() {
<del> String json = post("/hooked_objects", "{ stringValue: 'all_objects' }");
<add> String json = post("/hooked_objects", "{ \"stringValue\": \"all_objects\" }");
<ide> HookedObject object = from(json, HookedObject.class);
<ide> assertEquals("xpto all objects", object.getStringValue());
<ide> }
<ide>
<ide> @Test
<ide> public void testSuperClassHook() {
<del> String json = post("/hierarchy_subclasses/1", "{ name: 'john' }");
<add> String json = post("/hierarchy_subclasses/1", "{ \"name\": \"john\" }");
<ide>
<ide> ObjectSubClass object = from(json, ObjectSubClass.class);
<ide>
<ide>
<ide> @Test
<ide> public void testMoreSpecificSubClassHook() {
<del> String json = post("/hierarchy_another-subclasses/1", "{ name: 'john' }");
<add> String json = post("/hierarchy_another-subclasses/1", "{ \"name\": \"john\" }");
<ide>
<ide> AnotherObjectSubClass object = from(json, AnotherObjectSubClass.class);
<ide> |
|
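On the one-word "genson" message in the yawp record above: it presumably refers to the Genson JSON library, and the visible effect of the commit is simply that the hand-written request bodies switch from the lenient, single-quoted form such as { name: 'john' } to standard JSON with double-quoted names and strings such as { "name": "john" }, which a strict JSON parser requires.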
Java | apache-2.0 | 642dbb424eb9cb3f491081ad246c1077fbf6038d | 0 | nhelferty/sdn-project,xuraylei/floodlight,alexreimers/floodlight,drinkwithwater/floodlightplus,Pengfei-Lu/floodlight,TidyHuang/floodlight,alsmadi/CSCI-6617,rsharo/floodlight,moisesber/floodlight,drinkwithwater/floodlightplus,Linerd/sdn_optimization,alberthitanaya/floodlight-dnscollector,baykovr/floodlight,drinkwithwater/floodlightplus,xph906/SDN-NW,avbleverik/floodlight,similecat/floodlightsec,akoshibe/fl-hhcp,yeasy/floodlight-lc,akoshibe/fl-hhcp,AndreMantas/floodlight,geddings/floodlight,duanjp8617/floodlight,TidyHuang/floodlight,thisthat/floodlight-controller,rizard/floodlight,m1k3lin0/SDNProject,cbarrin/EAGERFloodlight,egenevie/newnet,rizard/SOSForFloodlight,omkale/myfloodlight,Linerd/sdn_optimization,rhoybeen/floodlightLB,chris19891128/FloodlightSec,xph906/SDN-ec2,m1k3lin0/SDNProject,iluckydonkey/floodlight,iluckydonkey/floodlight,dhruvkakadiya/FloodlightLoadBalancer,teja-/floodlight,woniu17/floodlight,phisolani/floodlight,lalithsuresh/odin-master,CS-6617-Java/Floodlight,deepurple/floodlight,rhoybeen/floodlightLB,xph906/SDN-ec2,yeasy/floodlight-lc,SujithPandel/floodlight,wallnerryan/floodlight,jmiserez/floodlight,rcchan/cs168-sdn-floodlight,fazevedo86/floodlight,wallnerryan/floodlight,srcvirus/floodlight,andiwundsam/floodlight-sync-proto,CS-6617-Java/Floodlight,TKTL-SDN/SoftOffload-Master,CS-6617-Java/Floodlight,aprakash6/floodlight_video_cacher,TidyHuang/floodlight,nhelferty/sdn-project,rizard/geni-cinema,similecat/floodlightsec,schuza/odin-master,rhoybeen/floodlightLB,marcbaetica/Floodlight-OVS-OF-Network-Solution-,rizard/geni-cinema,thisthat/floodlight-controller,riajkhu/floodlight,Linerd/sdn_optimization,09zwcbupt/floodlight,marcbaetica/Floodlight-OVS-OF-Network-Solution-,ZhangMenghao/Floodlight,pixuan/floodlight,similecat/floodlightsec,swiatecki/DTUSDN,m1k3lin0/SDNProject,jmiserez/floodlight,rizard/floodlight,rizard/SOSForFloodlight,scofieldsoros/floodlight-0.9,teja-/floodlight,gfsantanna/firewall_SDN,avbleverik/floodlight,deepurple/floodlight,fazevedo86/floodlight,wallnerryan/floodlight,niuqg/floodlight-test,m1k3lin0/SDNProject,andiwundsam/floodlight-sync-proto,chechoRP/floodlight,chris19891128/FloodlightSec,iluckydonkey/floodlight,onebsv1/floodlight,Wi5/odin-wi5-controller,omkale/myfloodlight,daniel666/multicastSDN,wallnerryan/FL_HAND,wallnerryan/FL_HAND,onebsv1/floodlight,m1k3lin0/SDNProject,hgupta2/floodlight2,gfsantanna/firewall_SDN,UdS-TelecommunicationsLab/floodlight,rcchan/cs168-sdn-floodlight,Wi5/odin-wi5-controller,xph906/SDN-NW,xuraylei/floodlight,avbleverik/floodlight,Pengfei-Lu/floodlight,ZhangMenghao/Floodlight,moisesber/floodlight,netgroup/floodlight,UdS-TelecommunicationsLab/floodlight,cbarrin/EAGERFloodlight,kvm2116/floodlight,scofieldsoros/floodlight-0.9,floodlight/floodlight,thisthat/floodlight-controller,09zwcbupt/floodlight,netgroup/floodlight,SujithPandel/floodlight,CS-6617-Java/Floodlight,teja-/floodlight,avbleverik/floodlight,daniel666/multicastSDN,alberthitanaya/floodlight-dnscollector,schuza/odin-master,niuqg/floodlight-test,Pengfei-Lu/floodlight,jmiserez/floodlight,baykovr/floodlight,jmiserez/floodlight,chechoRP/floodlight,rcchan/cs168-sdn-floodlight,Pengfei-Lu/floodlight,AndreMantas/floodlight,pablotiburcio/AutoManIoT,fazevedo86/floodlight,similecat/floodlightsec,marcbaetica/Floodlight-OVS-OF-Network-Solution-,baykovr/floodlight,phisolani/floodlight,xph906/SDN-NW,schuza/odin-master,chinmaymhatre91/floodlight,iluckydonkey/floodlight,xph906/SDN-NW,StefanoSalsano/my-floodlight,chinmaymhatre9
1/floodlight,rizard/fast-failover-demo,onebsv1/floodlightworkbench,rhoybeen/floodlightLB,JinWenQiang/FloodlightController,drinkwithwater/floodlightplus,xph906/SDN,thisthat/floodlight-controller,phisolani/floodlight,pablotiburcio/AutoManIoT,alsmadi/CSCI-6617,alexreimers/floodlight,riajkhu/floodlight,deepurple/floodlight,andi-bigswitch/floodlight-oss,xph906/SDN,wallnerryan/floodlight,Wi5/odin-wi5-controller,egenevie/newnet,xph906/SDN-ec2,swiatecki/DTUSDN,geddings/floodlight,andiwundsam/floodlight-sync-proto,chechoRP/floodlight,xph906/SDN,woniu17/floodlight,kwanggithub/umfloodlight,ZhangMenghao/Floodlight,schuza/odin-master,pixuan/floodlight,gfsantanna/firewall_SDN,rizard/SOSForFloodlight,rcchan/cs168-sdn-floodlight,alsmadi/CSCI-6617,alexreimers/floodlight,dhruvkakadiya/FloodlightLoadBalancer,dhruvkakadiya/FloodlightLoadBalancer,Pengfei-Lu/floodlight,marymiller/floodlight,xph906/SDN,fazevedo86/floodlight,omkale/myfloodlight,aprakash6/floodlight_video_cacher,similecat/floodlightsec,smartnetworks/floodlight,dhruvkakadiya/FloodlightLoadBalancer,pablotiburcio/AutoManIoT,rizard/geni-cinema,riajkhu/floodlight,aprakash6/floodlight_video_cacher,rizard/floodlight,alsmadi/CSCI-6617,CS-6617-Java/Floodlight,alberthitanaya/floodlight-dnscollector,duanjp8617/floodlight,CS-6617-Java/Floodlight,egenevie/newnet,TidyHuang/floodlight,onebsv1/floodlight,avbleverik/floodlight,gfsantanna/firewall_SDN,alexreimers/floodlight,nhelferty/sdn-project,wallnerryan/FL_HAND,alberthitanaya/floodlight-dnscollector,woniu17/floodlight,deepurple/floodlight,akoshibe/fl-hhcp,chechoRP/floodlight,woniu17/floodlight,pixuan/floodlight,moisesber/floodlight,rizard/fast-failover-demo,pixuan/floodlight,ZhangMenghao/Floodlight,aprakash6/floodlight_video_cacher,Linerd/sdn_optimization,onebsv1/floodlightworkbench,geddings/floodlight,duanjp8617/floodlight,egenevie/newnet,rsharo/floodlight,SujithPandel/floodlight,TKTL-SDN/SoftOffload-Master,09zwcbupt/floodlight,JinWenQiang/FloodlightController,chris19891128/FloodlightSec,JinWenQiang/FloodlightController,woniu17/floodlight,moisesber/floodlight,phisolani/floodlight,iluckydonkey/floodlight,marymiller/floodlight,rsharo/floodlight,swiatecki/DTUSDN,hgupta2/floodlight2,hgupta2/floodlight2,lalithsuresh/odin-master,AndreMantas/floodlight,marymiller/floodlight,onebsv1/floodlightworkbench,rizard/fast-failover-demo,andi-bigswitch/floodlight-oss,UdS-TelecommunicationsLab/floodlight,09zwcbupt/floodlight,chechoRP/floodlight,smartnetworks/floodlight,chinmaymhatre91/floodlight,kvm2116/floodlight,swiatecki/DTUSDN,chinmaymhatre91/floodlight,kwanggithub/umfloodlight,scofieldsoros/floodlight-0.9,andi-bigswitch/floodlight-oss,phisolani/floodlight,xph906/SDN-ec2,srcvirus/floodlight,UdS-TelecommunicationsLab/floodlight,dhruvkakadiya/FloodlightLoadBalancer,netgroup/floodlight,ZhangMenghao/Floodlight,srcvirus/floodlight,cbarrin/EAGERFloodlight,smartnetworks/floodlight,yeasy/floodlight-lc,Wi5/odin-wi5-controller,lalithsuresh/odin-master,riajkhu/floodlight,netgroup/floodlight,niuqg/floodlight-test,rizard/fast-failover-demo,baykovr/floodlight,akoshibe/fl-hhcp,kwanggithub/umfloodlight,TKTL-SDN/SoftOffload-Master,chris19891128/FloodlightSec,thisthat/floodlight-controller,andi-bigswitch/floodlight-oss,xuraylei/floodlight,rizard/geni-cinema,niuqg/floodlight-test,TKTL-SDN/SoftOffload-Master,srcvirus/floodlight,TKTL-SDN/SoftOffload-Master,JinWenQiang/FloodlightController,rcchan/cs168-sdn-floodlight,chinmaymhatre91/floodlight,rizard/geni-cinema,alberthitanaya/floodlight-dnscollector,deepurple/floodlight,pixuan/floodlight,fazevedo
86/floodlight,JinWenQiang/FloodlightController,smartnetworks/floodlight,aprakash6/floodlight_video_cacher,xph906/SDN,nhelferty/sdn-project,floodlight/floodlight,jmiserez/floodlight,StefanoSalsano/my-floodlight,andiwundsam/floodlight-sync-proto,yeasy/floodlight-lc,kwanggithub/umfloodlight,xph906/SDN-NW,StefanoSalsano/my-floodlight,swiatecki/DTUSDN,Wi5/odin-wi5-controller,lalithsuresh/odin-master,kvm2116/floodlight,duanjp8617/floodlight,marcbaetica/Floodlight-OVS-OF-Network-Solution-,UdS-TelecommunicationsLab/floodlight,alexreimers/floodlight,CS-6617-Java/Floodlight,floodlight/floodlight,baykovr/floodlight,marymiller/floodlight,TidyHuang/floodlight,smartnetworks/floodlight,gfsantanna/firewall_SDN,StefanoSalsano/my-floodlight,rizard/fast-failover-demo,teja-/floodlight,SujithPandel/floodlight,moisesber/floodlight,netgroup/floodlight,daniel666/multicastSDN,rhoybeen/floodlightLB,schuza/odin-master,hgupta2/floodlight2,daniel666/multicastSDN,xph906/SDN-ec2,alsmadi/CSCI-6617,wallnerryan/FL_HAND,duanjp8617/floodlight,scofieldsoros/floodlight-0.9 | package net.floodlightcontroller.virtualnetwork.forwarding;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.openflow.protocol.OFFlowMod;
import org.openflow.protocol.OFMatch;
import org.openflow.protocol.OFMessage;
import org.openflow.protocol.OFPacketIn;
import org.openflow.protocol.OFPacketOut;
import org.openflow.protocol.OFType;
import org.openflow.protocol.action.OFAction;
import org.openflow.util.HexString;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import net.floodlightcontroller.core.FloodlightContext;
import net.floodlightcontroller.core.IFloodlightProviderService;
import net.floodlightcontroller.core.IOFMessageListener;
import net.floodlightcontroller.core.IOFSwitch;
import net.floodlightcontroller.core.module.FloodlightModuleContext;
import net.floodlightcontroller.core.module.FloodlightModuleException;
import net.floodlightcontroller.core.module.IFloodlightModule;
import net.floodlightcontroller.core.module.IFloodlightService;
import net.floodlightcontroller.core.util.AppCookie;
import net.floodlightcontroller.packet.ARP;
import net.floodlightcontroller.packet.Ethernet;
import net.floodlightcontroller.packet.IPv4;
import net.floodlightcontroller.restserver.IRestApiService;
import net.floodlightcontroller.util.MACAddress;
import net.floodlightcontroller.virtualnetwork.IVirtualNetworkService;
/**
* A simple Layer 2 (MAC based) network virtualization module. This module allows
* you to create simple L2 networks (host + gateway) and will drop traffic if
* they are not on the same virtual network.
*
* LIMITATIONS
* - This module does not allow overlapping of IPs or MACs
* - You can only have 1 gateway per virtual network (can be shared)
* - There is filtering of multicast/broadcast traffic
*
* @author alexreimers
*/
public class VirtualNetworkFilter
implements IFloodlightModule, IVirtualNetworkService, IOFMessageListener {
protected static Logger log = LoggerFactory.getLogger(VirtualNetworkFilter.class);
private final short FLOW_MOD_DEFAULT_IDLE_TIMEOUT = 5; // in seconds
private final short APP_ID = 20;
// Our dependencies
IFloodlightProviderService floodlightProvider;
IRestApiService restApi;
// Our internal state
protected Map<String, String> nameToGuid; // Logical name -> Network ID
protected Map<String, Integer> guidToGateway; // Network ID -> Gateway IP
protected Map<Integer, Set<String>> gatewayToGuid; // Gateway IP -> Network ID
protected Map<MACAddress, Integer> macToGateway; // Gateway MAC -> Gateway IP
protected Map<MACAddress, String> macToGuid; // Host MAC -> Network ID
    protected Map<String, MACAddress> portToMac; // Logical port name -> Host MAC
protected void addGateway(String guid, Integer ip) {
if (ip.intValue() != 0) {
guidToGateway.put(guid, ip);
if (gatewayToGuid.containsKey(ip)) {
Set<String> gSet = gatewayToGuid.get(ip);
gSet.add(guid);
} else {
Set<String> gSet = Collections.synchronizedSet(new HashSet<String>());
gSet.add(guid);
gatewayToGuid.put(ip, gSet);
}
}
}
protected void deleteGateway(String guid) {
Integer gwIp = guidToGateway.remove(guid);
if (gwIp == null) return;
Set<String> gSet = gatewayToGuid.get(gwIp);
gSet.remove(guid);
}
// IVirtualNetworkService
@Override
public void createNetwork(String guid, String network, Integer gateway) {
if (log.isDebugEnabled()) {
String gw = null;
try {
gw = IPv4.fromIPv4Address(gateway);
} catch (Exception e) {
// fail silently
}
log.debug("Creating network {} with ID {} and gateway {}",
new Object[] {network, guid, gw});
}
if (!nameToGuid.isEmpty()) {
// We have to iterate all the networks to handle name/gateway changes
for (Entry<String, String> entry : nameToGuid.entrySet()) {
if (entry.getValue().equals(guid)) {
nameToGuid.remove(entry.getKey());
break;
}
}
}
nameToGuid.put(network, guid);
// If they don't specify a new gateway the old one will be preserved
if ((gateway != null) && (gateway != 0))
addGateway(guid, gateway);
}
@Override
public void deleteNetwork(String guid) {
String name = null;
if (nameToGuid.isEmpty()) {
log.warn("Could not delete network with ID {}, network doesn't exist",
guid);
return;
}
for (Entry<String, String> entry : nameToGuid.entrySet()) {
if (entry.getValue().equals(guid)) {
name = entry.getKey();
break;
}
log.warn("Could not delete network with ID {}, network doesn't exist",
guid);
}
if (log.isDebugEnabled())
log.debug("Deleting network with name {} ID {}", name, guid);
nameToGuid.remove(name);
deleteGateway(guid);
Collection<MACAddress> deleteList = new ArrayList<MACAddress>();
for (MACAddress host : macToGuid.keySet()) {
if (macToGuid.get(host).equals(guid)) {
deleteList.add(host);
}
}
for (MACAddress mac : deleteList) {
if (log.isDebugEnabled()) {
log.debug("Removing host {} from network {}",
HexString.toHexString(mac.toBytes()), guid);
}
macToGuid.remove(mac);
for (Entry<String, MACAddress> entry : portToMac.entrySet()) {
if (entry.getValue().equals(mac)) {
portToMac.remove(entry.getKey());
break;
}
}
}
}
@Override
public void addHost(MACAddress mac, String guid, String port) {
if (guid != null) {
if (log.isDebugEnabled()) {
log.debug("Adding {} to network ID {} on port {}",
new Object[] {mac, guid, port});
}
// We ignore old mappings
macToGuid.put(mac, guid);
portToMac.put(port, mac);
} else {
log.warn("Could not add MAC {} to network ID {} on port {}, the network does not exist",
new Object[] {mac, guid, port});
}
}
@Override
public void deleteHost(MACAddress mac, String port) {
if (log.isDebugEnabled()) {
log.debug("Removing host {} from port {}", mac, port);
}
if (mac == null && port == null) return;
if (port != null) {
MACAddress host = portToMac.remove(port);
macToGuid.remove(host);
} else if (mac != null) {
if (!portToMac.isEmpty()) {
for (Entry<String, MACAddress> entry : portToMac.entrySet()) {
if (entry.getValue().equals(mac)) {
portToMac.remove(entry.getKey());
macToGuid.remove(entry.getValue());
return;
}
}
}
}
}
@Override
public Collection<Class<? extends IFloodlightService>> getModuleServices() {
Collection<Class<? extends IFloodlightService>> l =
new ArrayList<Class<? extends IFloodlightService>>();
l.add(IVirtualNetworkService.class);
return l;
}
@Override
public Map<Class<? extends IFloodlightService>, IFloodlightService>
getServiceImpls() {
Map<Class<? extends IFloodlightService>,
IFloodlightService> m =
new HashMap<Class<? extends IFloodlightService>,
IFloodlightService>();
m.put(IVirtualNetworkService.class, this);
return m;
}
@Override
public Collection<Class<? extends IFloodlightService>> getModuleDependencies() {
Collection<Class<? extends IFloodlightService>> l =
new ArrayList<Class<? extends IFloodlightService>>();
l.add(IFloodlightProviderService.class);
l.add(IRestApiService.class);
return l;
}
@Override
public void init(FloodlightModuleContext context)
throws FloodlightModuleException {
floodlightProvider = context.getServiceImpl(IFloodlightProviderService.class);
restApi = context.getServiceImpl(IRestApiService.class);
nameToGuid = new ConcurrentHashMap<String, String>();
guidToGateway = new ConcurrentHashMap<String, Integer>();
gatewayToGuid = new ConcurrentHashMap<Integer, Set<String>>();
macToGuid = new ConcurrentHashMap<MACAddress, String>();
portToMac = new ConcurrentHashMap<String, MACAddress>();
macToGateway = new ConcurrentHashMap<MACAddress, Integer>();
}
@Override
public void startUp(FloodlightModuleContext context) {
floodlightProvider.addOFMessageListener(OFType.PACKET_IN, this);
restApi.addRestletRoutable(new VirtualNetworkWebRoutable());
}
// IOFMessageListener
@Override
public String getName() {
return "virtualizer";
}
@Override
public boolean isCallbackOrderingPrereq(OFType type, String name) {
// Link discovery should go before us so we don't block LLDPs
return (type.equals(OFType.PACKET_IN) && name.equals("linkdiscovery"));
}
@Override
public boolean isCallbackOrderingPostreq(OFType type, String name) {
// We need to go before forwarding
return (type.equals(OFType.PACKET_IN) && name.equals("forwarding"));
}
@Override
public Command receive(IOFSwitch sw, OFMessage msg, FloodlightContext cntx) {
switch (msg.getType()) {
case PACKET_IN:
return processPacketIn(sw, (OFPacketIn)msg, cntx);
}
log.warn("Received unexpected message {}", msg);
return Command.CONTINUE;
}
protected boolean isDefaultGatewayIp(String srcDevNetwork, IPv4 packet) {
return guidToGateway.get(srcDevNetwork).equals(packet.getDestinationAddress());
}
protected boolean oneSameNetwork(MACAddress src, MACAddress dst) {
String srcNetwork = macToGuid.get(src);
String dstNetwork = macToGuid.get(dst);
if (srcNetwork == null) return false;
if (dstNetwork == null) return false;
return srcNetwork.equals(dstNetwork);
}
protected Command processPacketIn(IOFSwitch sw, OFPacketIn msg, FloodlightContext cntx) {
Ethernet eth = IFloodlightProviderService.bcStore.get(cntx,
IFloodlightProviderService.CONTEXT_PI_PAYLOAD);
Command ret = Command.STOP;
String srcNetwork = macToGuid.get(eth.getSourceMAC());
if (srcNetwork == null && !(eth.getPayload() instanceof ARP)) {
log.debug("Blocking traffic from host {} because it is not attached to any network.",
HexString.toHexString(eth.getSourceMACAddress()));
ret = Command.STOP;
} else {
if (eth.isBroadcast() || eth.isMulticast()) {
return Command.CONTINUE;
}
if (oneSameNetwork(eth.getSourceMAC(), eth.getDestinationMAC())) {
// if they are on the same network continue
ret = Command.CONTINUE;
} else if ((eth.getPayload() instanceof IPv4)
&& isDefaultGatewayIp(srcNetwork, (IPv4)eth.getPayload())) {
// or if the host is talking to the gateway continue
ret = Command.CONTINUE;
} else if (eth.getPayload() instanceof ARP){
// We have to check here if it is an ARP reply from the default gateway
ARP arp = (ARP) eth.getPayload();
if (arp.getProtocolType() != ARP.PROTO_TYPE_IP) {
ret = Command.CONTINUE;
} else if (arp.getOpCode() == ARP.OP_REPLY) {
int ip = IPv4.toIPv4Address(arp.getSenderProtocolAddress());
for (Integer i : gatewayToGuid.keySet()) {
if (i.intValue() == ip) {
// Learn the default gateway MAC
if (log.isDebugEnabled()) {
log.debug("Adding {} with IP {} as a gateway",
HexString.toHexString(arp.getSenderHardwareAddress()),
IPv4.fromIPv4Address(ip));
}
macToGateway.put(new MACAddress(arp.getSenderHardwareAddress()), ip);
// Now we see if it's allowed for this packet
String hostNet = macToGuid.get(new MACAddress(eth.getDestinationMACAddress()));
Set<String> gwGuids = gatewayToGuid.get(ip);
if ((gwGuids != null) && (gwGuids.contains(hostNet)))
ret = Command.CONTINUE;
break;
}
}
}
}
if (ret == Command.CONTINUE) {
if (log.isTraceEnabled()) {
log.trace("Allowing flow between {} and {} on network {}",
new Object[] {eth.getSourceMAC(), eth.getDestinationMAC(), srcNetwork});
}
} else if (ret == Command.STOP) {
// they are on different virtual networks so we drop the flow
if (log.isTraceEnabled()) {
log.trace("Dropping flow between {} and {} because they are on different networks",
new Object[] {eth.getSourceMAC(), eth.getDestinationMAC()});
}
doDropFlow(sw, msg, cntx);
}
}
return ret;
}
protected void doDropFlow(IOFSwitch sw, OFPacketIn pi, FloodlightContext cntx) {
if (log.isTraceEnabled()) {
log.trace("doDropFlow pi={} srcSwitch={}",
new Object[] { pi, sw });
}
if (sw == null) {
log.warn("Switch is null, not installing drop flowmod for PacketIn {}", pi);
return;
}
// Create flow-mod based on packet-in and src-switch
OFFlowMod fm =
(OFFlowMod) floodlightProvider.getOFMessageFactory().getMessage(OFType.FLOW_MOD);
OFMatch match = new OFMatch();
match.loadFromPacket(pi.getPacketData(), pi.getInPort());
List<OFAction> actions = new ArrayList<OFAction>(); // no actions = drop
long cookie = AppCookie.makeCookie(APP_ID, 0);
fm.setCookie(cookie)
.setIdleTimeout(FLOW_MOD_DEFAULT_IDLE_TIMEOUT)
.setHardTimeout((short) 0)
.setBufferId(OFPacketOut.BUFFER_ID_NONE)
.setMatch(match)
.setActions(actions)
.setLengthU(OFFlowMod.MINIMUM_LENGTH);
fm.setFlags(OFFlowMod.OFPFF_SEND_FLOW_REM);
try {
if (log.isTraceEnabled()) {
log.trace("write drop flow-mod srcSwitch={} match={} " +
"pi={} flow-mod={}",
new Object[] {sw, match, pi, fm});
}
sw.write(fm, cntx);
} catch (IOException e) {
log.error("Failure writing drop flow mod", e);
}
return;
}
}
| src/main/java/net/floodlightcontroller/virtualnetwork/forwarding/VirtualNetworkFilter.java | package net.floodlightcontroller.virtualnetwork.forwarding;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.openflow.protocol.OFFlowMod;
import org.openflow.protocol.OFMatch;
import org.openflow.protocol.OFMessage;
import org.openflow.protocol.OFPacketIn;
import org.openflow.protocol.OFPacketOut;
import org.openflow.protocol.OFType;
import org.openflow.protocol.action.OFAction;
import org.openflow.util.HexString;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import net.floodlightcontroller.core.FloodlightContext;
import net.floodlightcontroller.core.IFloodlightProviderService;
import net.floodlightcontroller.core.IOFMessageListener;
import net.floodlightcontroller.core.IOFSwitch;
import net.floodlightcontroller.core.module.FloodlightModuleContext;
import net.floodlightcontroller.core.module.FloodlightModuleException;
import net.floodlightcontroller.core.module.IFloodlightModule;
import net.floodlightcontroller.core.module.IFloodlightService;
import net.floodlightcontroller.core.util.AppCookie;
import net.floodlightcontroller.packet.ARP;
import net.floodlightcontroller.packet.Ethernet;
import net.floodlightcontroller.packet.IPv4;
import net.floodlightcontroller.restserver.IRestApiService;
import net.floodlightcontroller.util.MACAddress;
import net.floodlightcontroller.virtualnetwork.IVirtualNetworkService;
/**
* A simple Layer 2 (MAC based) network virtualization module. This module allows
* you to create simple L2 networks (host + gateway) and will drop traffic if
* they are not on the same virtual network. This module does not support overlapping
* MAC address or IP address space. It also limits you to one default gateway per
* virtual network. It also must work in conjunction with the forwarding module.
* @author alexreimers
*/
public class VirtualNetworkFilter
implements IFloodlightModule, IVirtualNetworkService, IOFMessageListener {
protected static Logger log = LoggerFactory.getLogger(VirtualNetworkFilter.class);
private final short FLOW_MOD_DEFAULT_IDLE_TIMEOUT = 5; // in seconds
private final short APP_ID = 10; // TODO - check this
// Our dependencies
IFloodlightProviderService floodlightProvider;
IRestApiService restApi;
// Our internal state
protected Map<String, String> nameToGuid; // Logical name -> Network ID
protected Map<String, Integer> guidToGateway; // Network ID -> Gateway IP
protected Map<Integer, Set<String>> gatewayToGuid; // Gateway IP -> Network ID
protected Map<MACAddress, Integer> macToGateway; // Gateway MAC -> Gateway IP
protected Map<MACAddress, String> macToGuid; // Host MAC -> Network ID
    protected Map<String, MACAddress> portToMac; // Logical port name -> Host MAC
protected void addGateway(String guid, Integer ip) {
if (ip.intValue() != 0) {
guidToGateway.put(guid, ip);
if (gatewayToGuid.containsKey(ip)) {
Set<String> gSet = gatewayToGuid.get(ip);
gSet.add(guid);
} else {
Set<String> gSet = Collections.synchronizedSet(new HashSet<String>());
gSet.add(guid);
gatewayToGuid.put(ip, gSet);
}
}
}
protected void deleteGateway(String guid) {
Integer gwIp = guidToGateway.remove(guid);
if (gwIp == null) return;
Set<String> gSet = gatewayToGuid.get(gwIp);
gSet.remove(guid);
}
// IVirtualNetworkService
@Override
public void createNetwork(String guid, String network, Integer gateway) {
if (log.isDebugEnabled()) {
String gw = null;
try {
gw = IPv4.fromIPv4Address(gateway);
} catch (Exception e) {
// fail silently
}
log.debug("Creating network {} with ID {} and gateway {}",
new Object[] {network, guid, gw});
}
if (!nameToGuid.isEmpty()) {
// We have to iterate all the networks to handle name/gateway changes
for (Entry<String, String> entry : nameToGuid.entrySet()) {
if (entry.getValue().equals(guid)) {
nameToGuid.remove(entry.getKey());
break;
}
}
}
nameToGuid.put(network, guid);
// If they don't specify a new gateway the old one will be preserved
if ((gateway != null) && (gateway != 0))
addGateway(guid, gateway);
}
@Override
public void deleteNetwork(String guid) {
String name = null;
if (nameToGuid.isEmpty()) {
log.warn("Could not delete network with ID {}, network doesn't exist",
guid);
return;
}
for (Entry<String, String> entry : nameToGuid.entrySet()) {
if (entry.getValue().equals(guid)) {
name = entry.getKey();
break;
}
log.warn("Could not delete network with ID {}, network doesn't exist",
guid);
}
if (log.isDebugEnabled())
log.debug("Deleting network with name {} ID {}", name, guid);
nameToGuid.remove(name);
deleteGateway(guid);
Collection<MACAddress> deleteList = new ArrayList<MACAddress>();
for (MACAddress host : macToGuid.keySet()) {
if (macToGuid.get(host).equals(guid)) {
deleteList.add(host);
}
}
for (MACAddress mac : deleteList) {
if (log.isDebugEnabled()) {
log.debug("Removing host {} from network {}",
HexString.toHexString(mac.toBytes()), guid);
}
macToGuid.remove(mac);
for (Entry<String, MACAddress> entry : portToMac.entrySet()) {
if (entry.getValue().equals(mac)) {
portToMac.remove(entry.getKey());
break;
}
}
}
}
@Override
public void addHost(MACAddress mac, String guid, String port) {
if (guid != null) {
if (log.isDebugEnabled()) {
log.debug("Adding {} to network ID {} on port {}",
new Object[] {mac, guid, port});
}
// We ignore old mappings
macToGuid.put(mac, guid);
portToMac.put(port, mac);
} else {
log.warn("Could not add MAC {} to network ID {} on port {}, the network does not exist",
new Object[] {mac, guid, port});
}
}
@Override
public void deleteHost(MACAddress mac, String port) {
if (log.isDebugEnabled()) {
log.debug("Removing host {} from port {}", mac, port);
}
if (mac == null && port == null) return;
if (port != null) {
MACAddress host = portToMac.remove(port);
macToGuid.remove(host);
} else if (mac != null) {
if (!portToMac.isEmpty()) {
for (Entry<String, MACAddress> entry : portToMac.entrySet()) {
if (entry.getValue().equals(mac)) {
portToMac.remove(entry.getKey());
macToGuid.remove(entry.getValue());
return;
}
}
}
}
}
@Override
public Collection<Class<? extends IFloodlightService>> getModuleServices() {
Collection<Class<? extends IFloodlightService>> l =
new ArrayList<Class<? extends IFloodlightService>>();
l.add(IVirtualNetworkService.class);
return l;
}
@Override
public Map<Class<? extends IFloodlightService>, IFloodlightService>
getServiceImpls() {
Map<Class<? extends IFloodlightService>,
IFloodlightService> m =
new HashMap<Class<? extends IFloodlightService>,
IFloodlightService>();
m.put(IVirtualNetworkService.class, this);
return m;
}
@Override
public Collection<Class<? extends IFloodlightService>> getModuleDependencies() {
Collection<Class<? extends IFloodlightService>> l =
new ArrayList<Class<? extends IFloodlightService>>();
l.add(IFloodlightProviderService.class);
l.add(IRestApiService.class);
return l;
}
@Override
public void init(FloodlightModuleContext context)
throws FloodlightModuleException {
floodlightProvider = context.getServiceImpl(IFloodlightProviderService.class);
restApi = context.getServiceImpl(IRestApiService.class);
nameToGuid = new ConcurrentHashMap<String, String>();
guidToGateway = new ConcurrentHashMap<String, Integer>();
gatewayToGuid = new ConcurrentHashMap<Integer, Set<String>>();
macToGuid = new ConcurrentHashMap<MACAddress, String>();
portToMac = new ConcurrentHashMap<String, MACAddress>();
macToGateway = new ConcurrentHashMap<MACAddress, Integer>();
}
@Override
public void startUp(FloodlightModuleContext context) {
floodlightProvider.addOFMessageListener(OFType.PACKET_IN, this);
restApi.addRestletRoutable(new VirtualNetworkWebRoutable());
}
// IOFMessageListener
@Override
public String getName() {
return "virtualizer";
}
@Override
public boolean isCallbackOrderingPrereq(OFType type, String name) {
// Link discovery should go before us so we don't block LLDPs
return (type.equals(OFType.PACKET_IN) && name.equals("linkdiscovery"));
}
@Override
public boolean isCallbackOrderingPostreq(OFType type, String name) {
// We need to go before forwarding
return (type.equals(OFType.PACKET_IN) && name.equals("forwarding"));
}
@Override
public Command receive(IOFSwitch sw, OFMessage msg, FloodlightContext cntx) {
switch (msg.getType()) {
case PACKET_IN:
return processPacketIn(sw, (OFPacketIn)msg, cntx);
}
log.warn("Received unexpected message {}", msg);
return Command.CONTINUE;
}
protected boolean isDefaultGatewayIp(String srcDevNetwork, IPv4 packet) {
return guidToGateway.get(srcDevNetwork).equals(packet.getDestinationAddress());
}
protected boolean oneSameNetwork(MACAddress src, MACAddress dst) {
String srcNetwork = macToGuid.get(src);
String dstNetwork = macToGuid.get(dst);
if (srcNetwork == null) return false;
if (dstNetwork == null) return false;
return srcNetwork.equals(dstNetwork);
}
protected Command processPacketIn(IOFSwitch sw, OFPacketIn msg, FloodlightContext cntx) {
Ethernet eth = IFloodlightProviderService.bcStore.get(cntx,
IFloodlightProviderService.CONTEXT_PI_PAYLOAD);
Command ret = Command.STOP;
String srcNetwork = macToGuid.get(eth.getSourceMAC());
if (srcNetwork == null && !(eth.getPayload() instanceof ARP)) {
log.debug("Blocking traffic from host {} because it is not attached to any network.",
HexString.toHexString(eth.getSourceMACAddress()));
ret = Command.STOP;
} else {
if (eth.isBroadcast() || eth.isMulticast()) {
return Command.CONTINUE;
}
if (oneSameNetwork(eth.getSourceMAC(), eth.getDestinationMAC())) {
// if they are on the same network continue
ret = Command.CONTINUE;
} else if ((eth.getPayload() instanceof IPv4)
&& isDefaultGatewayIp(srcNetwork, (IPv4)eth.getPayload())) {
// or if the host is talking to the gateway continue
ret = Command.CONTINUE;
} else if (eth.getPayload() instanceof ARP){
// We have to check here if it is an ARP reply from the default gateway
ARP arp = (ARP) eth.getPayload();
if (arp.getProtocolType() != ARP.PROTO_TYPE_IP) {
ret = Command.CONTINUE;
} else if (arp.getOpCode() == ARP.OP_REPLY) {
int ip = IPv4.toIPv4Address(arp.getSenderProtocolAddress());
for (Integer i : gatewayToGuid.keySet()) {
if (i.intValue() == ip) {
// Learn the default gateway MAC
if (log.isDebugEnabled()) {
log.debug("Adding {} with IP {} as a gateway",
HexString.toHexString(arp.getSenderHardwareAddress()),
IPv4.fromIPv4Address(ip));
}
macToGateway.put(new MACAddress(arp.getSenderHardwareAddress()), ip);
// Now we see if it's allowed for this packet
String hostNet = macToGuid.get(new MACAddress(eth.getDestinationMACAddress()));
Set<String> gwGuids = gatewayToGuid.get(ip);
if ((gwGuids != null) && (gwGuids.contains(hostNet)))
ret = Command.CONTINUE;
break;
}
}
}
}
if (ret == Command.CONTINUE) {
if (log.isTraceEnabled()) {
log.trace("Allowing flow between {} and {} on network {}",
new Object[] {eth.getSourceMAC(), eth.getDestinationMAC(), srcNetwork});
}
} else if (ret == Command.STOP) {
// they are on different virtual networks so we drop the flow
if (log.isTraceEnabled()) {
log.trace("Dropping flow between {} and {} because they are on different networks",
new Object[] {eth.getSourceMAC(), eth.getDestinationMAC()});
}
doDropFlow(sw, msg, cntx);
}
}
return ret;
}
protected void doDropFlow(IOFSwitch sw, OFPacketIn pi, FloodlightContext cntx) {
if (log.isTraceEnabled()) {
log.trace("doDropFlow pi={} srcSwitch={}",
new Object[] { pi, sw });
}
if (sw == null) {
log.warn("Switch is null, not installing drop flowmod for PacketIn {}", pi);
return;
}
// Create flow-mod based on packet-in and src-switch
OFFlowMod fm =
(OFFlowMod) floodlightProvider.getOFMessageFactory().getMessage(OFType.FLOW_MOD);
OFMatch match = new OFMatch();
match.loadFromPacket(pi.getPacketData(), pi.getInPort());
List<OFAction> actions = new ArrayList<OFAction>(); // no actions = drop
long cookie = AppCookie.makeCookie(APP_ID, 0);
fm.setCookie(cookie)
.setIdleTimeout(FLOW_MOD_DEFAULT_IDLE_TIMEOUT)
.setBufferId(OFPacketOut.BUFFER_ID_NONE)
.setMatch(match)
.setActions(actions)
.setLengthU(OFFlowMod.MINIMUM_LENGTH);
fm.setFlags(OFFlowMod.OFPFF_SEND_FLOW_REM);
try {
if (log.isTraceEnabled()) {
log.trace("write drop flow-mod srcSwitch={} match={} " +
"pi={} flow-mod={}",
new Object[] {sw, match, pi, fm});
}
sw.write(fm, cntx);
} catch (IOException e) {
log.error("Failure writing drop flow mod", e);
}
return;
}
}
| Change app ID for VirtualNetworkFilter.
| src/main/java/net/floodlightcontroller/virtualnetwork/forwarding/VirtualNetworkFilter.java | Change app ID for VirtualNetworkFilter. | <ide><path>rc/main/java/net/floodlightcontroller/virtualnetwork/forwarding/VirtualNetworkFilter.java
<ide> /**
<ide> * A simple Layer 2 (MAC based) network virtualization module. This module allows
<ide> * you to create simple L2 networks (host + gateway) and will drop traffic if
<del> * they are not on the same virtual network. This module does not support overlapping
<del> * MAC address or IP address space. It also limits you to one default gateway per
<del> * virtual network. It also must work in conjunction with the forwarding module.
<add> * they are not on the same virtual network.
<add> *
<add> * LIMITATIONS
<add> * - This module does not allow overlapping of IPs or MACs
<add> * - You can only have 1 gateway per virtual network (can be shared)
<add> * - There is filtering of multicast/broadcast traffic
<add> *
<ide> * @author alexreimers
<ide> */
<ide> public class VirtualNetworkFilter
<ide> protected static Logger log = LoggerFactory.getLogger(VirtualNetworkFilter.class);
<ide>
<ide> private final short FLOW_MOD_DEFAULT_IDLE_TIMEOUT = 5; // in seconds
<del> private final short APP_ID = 10; // TODO - check this
<add> private final short APP_ID = 20;
<ide>
<ide> // Our dependencies
<ide> IFloodlightProviderService floodlightProvider;
<ide> long cookie = AppCookie.makeCookie(APP_ID, 0);
<ide> fm.setCookie(cookie)
<ide> .setIdleTimeout(FLOW_MOD_DEFAULT_IDLE_TIMEOUT)
<add> .setHardTimeout((short) 0)
<ide> .setBufferId(OFPacketOut.BUFFER_ID_NONE)
<ide> .setMatch(match)
<ide> .setActions(actions) |
|
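Before leaving the VirtualNetworkFilter record: addGateway() and deleteGateway() above maintain a reverse map from a gateway IP to the set of network GUIDs that share it, which is what the Javadoc's "can be shared" remark refers to. The standalone sketch below reproduces just that bookkeeping pattern with made-up GUIDs and a made-up gateway address; it is an illustration, not code from the module.

    import java.util.Collections;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;
    import java.util.concurrent.ConcurrentHashMap;

    // Standalone illustration (hypothetical values) of the gateway-sharing map kept
    // by VirtualNetworkFilter.addGateway(): several network GUIDs may point at the
    // same gateway IP, so the reverse lookup stores a Set of GUIDs per IP.
    class SharedGatewaySketch {
        public static void main(String[] args) {
            Map<Integer, Set<String>> gatewayToGuid =
                    new ConcurrentHashMap<Integer, Set<String>>();
            int gatewayIp = (10 << 24) | 1; // 10.0.0.1 as a packed int (the module stores gateway IPs as Integers)
            for (String guid : new String[] { "net-a", "net-b" }) {
                Set<String> gSet = gatewayToGuid.get(gatewayIp);
                if (gSet == null) {
                    gSet = Collections.synchronizedSet(new HashSet<String>());
                    gatewayToGuid.put(gatewayIp, gSet);
                }
                gSet.add(guid);
            }
            // Both GUIDs now resolve to the single shared gateway entry.
            System.out.println(gatewayToGuid.get(gatewayIp)); // prints something like [net-a, net-b]
        }
    }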
JavaScript | mit | bdf5e4fe6056309fab7eb86851d6e8ed3fd808f9 | 0 | Frojd/wagtail-geo-widget,Frojd/wagtail-geo-widget,Frojd/wagtail-geo-widget,Frojd/wagtail-geo-widget | "use strict";
function GeoField(options) {
var defaultLocation = options.defaultLocation;
defaultLocation = new google.maps.LatLng(
parseFloat(defaultLocation.lat),
parseFloat(defaultLocation.lng)
);
this.zoom = options.zoom;
this.srid = options.srid;
this.sourceField = $(options.sourceSelector);
this.addressField = $(options.addressSelector);
this.latLngField = $(options.latLngDisplaySelector);
this.geocoder = new google.maps.Geocoder();
this.geoWarningClassName = 'wagtailgeowidget__geo-warning';
this.geoSuccessClassName = 'wagtailgeowidget__geo-success';
this.initMap(options.mapEl, defaultLocation);
this.initEvents();
this.setMapPosition(defaultLocation);
this.updateLatLng(defaultLocation);
}
GeoField.prototype.initMap = function(mapEl, defaultLocation) {
var map = new google.maps.Map(mapEl, {
zoom: this.zoom,
center: defaultLocation
});
var marker = new google.maps.Marker({
position: defaultLocation,
map: map,
draggable: true
});
this.map = map;
this.marker = marker;
this.checkVisibility();
}
GeoField.prototype.initEvents = function() {
var self = this;
var autocomplete = new google.maps.places.Autocomplete(this.addressField[0]);
this.addressField.on("keydown", function(e) {
if (e.keyCode === 13) {
e.preventDefault();
e.stopPropagation();
self.geocodeSearch($(this).val());
}
});
google.maps.event.addListener(this.marker, "dragend", function(event) {
self.setMapPosition(event.latLng);
self.updateLatLng(event.latLng);
self.writeLocation(event.latLng);
});
this.latLngField.on("input", function(e) {
var coords = $(this).val();
self.updateMapFromCoords(coords);
});
this.addressField.on("input", function(e) {
clearTimeout(self._timeoutId);
var query = $(this).val();
if (query === "") {
self.clearWarning();
self.clearSuccess();
return;
}
self._timeoutId = setTimeout(function() {
self.geocodeSearch(query);
}, 400);
});
}
GeoField.prototype.displayWarning = function(msg) {
var self = this;
self.clearSuccess();
self.clearWarning();
var warningMsg = document.createElement('p');
warningMsg.className = 'help-block help-warning ' + self.geoWarningClassName;
warningMsg.innerHTML = msg;
$(warningMsg).insertAfter(self.addressField);
}
GeoField.prototype.displaySuccess = function(msg) {
var self = this;
clearTimeout(self._successTimeout);
self.clearSuccess();
self.clearWarning();
var successMessage = document.createElement('p');
successMessage.className = 'help-block help-info ' + self.geoSuccessClassName;
successMessage.innerHTML = 'Address has been successfully geo-coded';
$(successMessage).insertAfter(self.addressField);
self._successTimeout = setTimeout(function() {
self.clearSuccess();
}, 3000);
}
GeoField.prototype.clearWarning = function() {
var self = this;
$('.' + this.geoWarningClassName).remove();
}
GeoField.prototype.clearSuccess = function() {
var self = this;
$('.' + this.geoSuccessClassName).remove();
}
GeoField.prototype.checkVisibility = function() {
var self = this;
this.timeout = setTimeout(function (){
var visible = $(self.map.getDiv()).is(':visible')
if (visible) {
clearTimeout(self.timeout);
google.maps.event.trigger(self.map, 'resize');
var coords = $(self.latLngField).val();
self.updateMapFromCoords(coords)
} else {
self.checkVisibility();
}
}, 1000);
}
GeoField.prototype.geocodeSearch = function(query) {
var self = this;
this.geocoder.geocode({'address': query}, function(results, status) {
if (status === google.maps.GeocoderStatus.ZERO_RESULTS || !results.length) {
            self.displayWarning('Could not geocode address. The map may not be in sync with the address entered.');
return;
}
if (status !== google.maps.GeocoderStatus.OK) {
self.displayWarning('Google Maps Error: '+status);
return;
}
self.displaySuccess();
var latLng = results[0].geometry.location;
self.setMapPosition(latLng);
self.updateLatLng(latLng);
self.writeLocation(latLng);
});
}
GeoField.prototype.updateLatLng = function(latLng) {
this.latLngField.val(latLng.lat()+","+latLng.lng());
}
GeoField.prototype.updateMapFromCoords = function(coords) {
coords = coords.split(",").map(function(value) {
return parseFloat(value);
});
var latLng = new google.maps.LatLng(
coords[0],
coords[1]
);
this.setMapPosition(latLng);
}
GeoField.prototype.setMapPosition = function(latLng) {
this.marker.setPosition(latLng);
this.map.setCenter(latLng);
}
GeoField.prototype.writeLocation = function(latLng) {
var lat = latLng.lat();
var lng = latLng.lng();
var value = 'SRID=' + this.srid + ';POINT(' + lng + ' ' +lat+')';
this.sourceField.val(value);
}
var initializeGeoFields = function() {
$(".geo-field").each(function(index, el) {
var $el = $(el);
var data = window[$el.data('data-id')];
var options = {
mapEl: el,
sourceSelector: $(data.sourceSelector),
latLngDisplaySelector: $(data.latLngDisplaySelector),
zoom: data.zoom,
srid: data.srid,
}
options.addressSelector = data.addressSelector;
options.defaultLocation = data.defaultLocation;
new GeoField(options);
});
}
$(document).ready(function() {
google.maps.event.addDomListener(window, 'load', initializeGeoFields);
});
| wagtailgeowidget/static/wagtailgeowidget/js/geo-field.js | "use strict";
function GeoField(options) {
var defaultLocation = options.defaultLocation;
defaultLocation = new google.maps.LatLng(
parseFloat(defaultLocation.lat),
parseFloat(defaultLocation.lng)
);
this.zoom = options.zoom;
this.srid = options.srid;
this.sourceField = $(options.sourceSelector);
this.addressField = $(options.addressSelector);
this.latLngField = $(options.latLngDisplaySelector);
this.geocoder = new google.maps.Geocoder();
this.geoWarningClassName = 'wagtailgeowidget__geo-warning';
this.geoSuccessClassName = 'wagtailgeowidget__geo-success';
this.initMap(options.mapEl, defaultLocation);
this.initEvents();
this.setMapPosition(defaultLocation);
this.updateLatLng(defaultLocation);
}
GeoField.prototype.initMap = function(mapEl, defaultLocation) {
var map = new google.maps.Map(mapEl, {
zoom: this.zoom,
center: defaultLocation
});
var marker = new google.maps.Marker({
position: defaultLocation,
map: map,
draggable: true
});
this.map = map;
this.marker = marker;
this.checkVisibility();
}
GeoField.prototype.initEvents = function() {
var self = this;
var autocomplete = new google.maps.places.Autocomplete(this.addressField[0]);
this.addressField.on("keydown", function(e) {
if (e.keyCode === 13) {
e.preventDefault();
e.stopPropagation();
self.geocodeSearch($(this).val());
}
});
google.maps.event.addListener(this.marker, "dragend", function(event) {
self.setMapPosition(event.latLng);
self.updateLatLng(event.latLng);
self.writeLocation(event.latLng);
});
this.latLngField.on("input", function(e) {
var coords = $(this).val();
self.updateMapFromCoords(coords);
});
this.addressField.on("input", function(e) {
clearTimeout(self._timeoutId);
var query = $(this).val();
if (query === "") {
self.clearWarning();
self.clearSuccess();
return;
}
self._timeoutId = setTimeout(function() {
self.geocodeSearch(query);
}, 400);
});
}
GeoField.prototype.displayWarning = function(msg) {
var self = this;
self.clearSuccess();
self.clearWarning();
var warningMsg = document.createElement('p');
warningMsg.className = 'help-block help-warning ' + self.geoWarningClassName;
warningMsg.innerHTML = msg;
$(warningMsg).insertAfter(self.addressField);
}
GeoField.prototype.displaySuccess = function(msg) {
var self = this;
clearTimeout(self._successTimeout);
self.clearSuccess();
self.clearWarning();
var successMessage = document.createElement('p');
successMessage.className = 'help-block help-info ' + self.geoSuccessClassName;
successMessage.innerHTML = 'Address has been successfully geo-coded';
$(successMessage).insertAfter(self.addressField);
self._successTimeout = setTimeout(function() {
self.clearSuccess();
}, 3000);
}
GeoField.prototype.clearWarning = function() {
var self = this;
$('.' + self.geoWarningClassName).remove();
}
GeoField.prototype.clearSuccess = function() {
var self = this;
$('.' + self.geoSuccessClassName).remove();
}
GeoField.prototype.checkVisibility = function() {
var self = this;
this.timeout = setTimeout(function (){
var visible = $(self.map.getDiv()).is(':visible')
if (visible) {
clearTimeout(self.timeout);
google.maps.event.trigger(self.map, 'resize');
var coords = $(self.latLngField).val();
self.updateMapFromCoords(coords)
} else {
self.checkVisibility();
}
}, 1000);
}
GeoField.prototype.geocodeSearch = function(query) {
var self = this;
this.geocoder.geocode({'address': query}, function(results, status) {
if (status === google.maps.GeocoderStatus.ZERO_RESULTS || !results.length) {
            self.displayWarning('Could not geocode address. The map may not be in sync with the address entered.');
return;
}
if (status !== google.maps.GeocoderStatus.OK) {
self.displayWarning('Google Maps Error: '+status);
return;
}
self.displaySuccess();
var latLng = results[0].geometry.location;
self.setMapPosition(latLng);
self.updateLatLng(latLng);
self.writeLocation(latLng);
});
}
GeoField.prototype.updateLatLng = function(latLng) {
this.latLngField.val(latLng.lat()+","+latLng.lng());
}
GeoField.prototype.updateMapFromCoords = function(coords) {
coords = coords.split(",").map(function(value) {
return parseFloat(value);
});
var latLng = new google.maps.LatLng(
coords[0],
coords[1]
);
this.setMapPosition(latLng);
}
GeoField.prototype.setMapPosition = function(latLng) {
this.marker.setPosition(latLng);
this.map.setCenter(latLng);
}
GeoField.prototype.writeLocation = function(latLng) {
var lat = latLng.lat();
var lng = latLng.lng();
var value = 'SRID='+this.srid+';POINT('+lng+' '+lat+')';
this.sourceField.val(value);
}
var initializeGeoFields = function() {
$(".geo-field").each(function(index, el) {
var $el = $(el);
var data = window[$el.data('data-id')];
var options = {
mapEl: el,
sourceSelector: $(data.sourceSelector),
latLngDisplaySelector: $(data.latLngDisplaySelector),
zoom: data.zoom,
srid: data.srid,
}
options.addressSelector = data.addressSelector;
options.defaultLocation = data.defaultLocation;
new GeoField(options);
});
}
$(document).ready(function() {
google.maps.event.addDomListener(window, 'load', initializeGeoFields);
});
| Style updates (linting/spacing)
| wagtailgeowidget/static/wagtailgeowidget/js/geo-field.js | Style updates (linting/spacing) | <ide><path>agtailgeowidget/static/wagtailgeowidget/js/geo-field.js
<ide>
<ide> var autocomplete = new google.maps.places.Autocomplete(this.addressField[0]);
<ide> this.addressField.on("keydown", function(e) {
<del> if (e.keyCode === 13) {
<del> e.preventDefault();
<del> e.stopPropagation();
<del> self.geocodeSearch($(this).val());
<del> }
<add> if (e.keyCode === 13) {
<add> e.preventDefault();
<add> e.stopPropagation();
<add> self.geocodeSearch($(this).val());
<add> }
<ide> });
<ide>
<ide> google.maps.event.addListener(this.marker, "dragend", function(event) {
<ide> $(successMessage).insertAfter(self.addressField);
<ide>
<ide> self._successTimeout = setTimeout(function() {
<del> self.clearSuccess();
<add> self.clearSuccess();
<ide> }, 3000);
<ide> }
<ide>
<ide> GeoField.prototype.clearWarning = function() {
<ide> var self = this;
<del> $('.' + self.geoWarningClassName).remove();
<add> $('.' + this.geoWarningClassName).remove();
<ide> }
<ide>
<ide> GeoField.prototype.clearSuccess = function() {
<ide> var self = this;
<del> $('.' + self.geoSuccessClassName).remove();
<add> $('.' + this.geoSuccessClassName).remove();
<ide> }
<ide>
<ide> GeoField.prototype.checkVisibility = function() {
<del> var self = this;
<del> this.timeout = setTimeout(function (){
<del> var visible = $(self.map.getDiv()).is(':visible')
<del> if (visible) {
<del> clearTimeout(self.timeout);
<del> google.maps.event.trigger(self.map, 'resize');
<del> var coords = $(self.latLngField).val();
<del> self.updateMapFromCoords(coords)
<del> } else {
<del> self.checkVisibility();
<del> }
<del> }, 1000);
<add> var self = this;
<add> this.timeout = setTimeout(function (){
<add> var visible = $(self.map.getDiv()).is(':visible')
<add> if (visible) {
<add> clearTimeout(self.timeout);
<add> google.maps.event.trigger(self.map, 'resize');
<add> var coords = $(self.latLngField).val();
<add> self.updateMapFromCoords(coords)
<add> } else {
<add> self.checkVisibility();
<add> }
<add> }, 1000);
<ide> }
<ide>
<ide> GeoField.prototype.geocodeSearch = function(query) {
<ide> }
<ide>
<ide> GeoField.prototype.updateMapFromCoords = function(coords) {
<del> coords = coords.split(",").map(function(value) {
<del> return parseFloat(value);
<del> });
<del>
<del> var latLng = new google.maps.LatLng(
<del> coords[0],
<del> coords[1]
<del> );
<del> this.setMapPosition(latLng);
<add> coords = coords.split(",").map(function(value) {
<add> return parseFloat(value);
<add> });
<add>
<add> var latLng = new google.maps.LatLng(
<add> coords[0],
<add> coords[1]
<add> );
<add> this.setMapPosition(latLng);
<ide> }
<ide>
<ide> GeoField.prototype.setMapPosition = function(latLng) {
<ide> GeoField.prototype.writeLocation = function(latLng) {
<ide> var lat = latLng.lat();
<ide> var lng = latLng.lng();
<del> var value = 'SRID='+this.srid+';POINT('+lng+' '+lat+')';
<add> var value = 'SRID=' + this.srid + ';POINT(' + lng + ' ' +lat+')';
<ide>
<ide> this.sourceField.val(value);
<ide> } |
|
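One detail of the geo-field.js record worth spelling out: writeLocation() serializes the marker position as an EWKT-style string with longitude before latitude. With the widget's srid option set to 4326 and a marker at lat 59.3293 / lng 18.0686 (example values, not taken from the source), the hidden source field would end up holding SRID=4326;POINT(18.0686 59.3293).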
Java | bsd-3-clause | 28c2c6854ee46ab5464c64c0965b8d1e44148061 | 0 | dagi/GoodData-CL,dagi/GoodData-CL,gooddata/GoodData-CL,dagi/GoodData-CL,gooddata/GoodData-CL,gooddata/GoodData-CL | /*
* Copyright (c) 2009, GoodData Corporation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided
* that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this list of conditions and
* the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice, this list of conditions
* and the following disclaimer in the documentation and/or other materials provided with the distribution.
* * Neither the name of the GoodData Corporation nor the names of its contributors may be used to endorse
* or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
* OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
* AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.gooddata.processor;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.io.StringReader;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;
import com.gooddata.connector.Connector;
import com.gooddata.connector.CsvConnector;
import com.gooddata.connector.DateDimensionConnector;
import com.gooddata.connector.GaConnector;
import com.gooddata.connector.JdbcConnector;
import com.gooddata.connector.SfdcConnector;
import com.gooddata.connector.backend.ConnectorBackend;
import com.gooddata.connector.backend.DerbyConnectorBackend;
import com.gooddata.connector.backend.MySqlConnectorBackend;
import com.gooddata.exception.GdcException;
import com.gooddata.exception.GdcLoginException;
import com.gooddata.exception.GdcRestApiException;
import com.gooddata.exception.HttpMethodException;
import com.gooddata.exception.InternalErrorException;
import com.gooddata.exception.InvalidArgumentException;
import com.gooddata.exception.InvalidCommandException;
import com.gooddata.exception.InvalidParameterException;
import com.gooddata.exception.ModelException;
import com.gooddata.exception.ProcessingException;
import com.gooddata.exception.SfdcException;
import com.gooddata.integration.rest.configuration.NamePasswordConfiguration;
import com.gooddata.naming.N;
import com.gooddata.processor.parser.DIScriptParser;
import com.gooddata.processor.parser.ParseException;
import com.gooddata.util.FileUtil;
/**
* The GoodData Data Integration CLI processor.
*
* @author jiri.zaloudek
* @author Zdenek Svoboda <[email protected]>
* @version 1.0
*/
public class GdcDI implements Executor {
private static Logger l = Logger.getLogger(GdcDI.class);
//Options data
public static String[] CLI_PARAM_USERNAME = {"username","u"};
public static String[] CLI_PARAM_PASSWORD = {"password","p"};
public static String[] CLI_PARAM_HOST = {"host","h"};
public static String[] CLI_PARAM_FTP_HOST = {"ftphost","f"};
public static String[] CLI_PARAM_PROJECT = {"project","i"};
public static String[] CLI_PARAM_BACKEND = {"backend","b"};
public static String[] CLI_PARAM_DB_USERNAME = {"dbusername","d"};
public static String[] CLI_PARAM_DB_PASSWORD = {"dbpassword","c"};
public static String[] CLI_PARAM_PROTO = {"proto","t"};
public static String[] CLI_PARAM_EXECUTE = {"execute","e"};
public static String CLI_PARAM_SCRIPT = "script";
private static String DEFAULT_PROPERTIES = "gdi.properties";
// mandatory options
public static Option[] mandatoryOptions = { };
// optional options
public static Option[] optionalOptions = {
new Option(CLI_PARAM_USERNAME[1], CLI_PARAM_USERNAME[0], true, "GoodData username"),
new Option(CLI_PARAM_PASSWORD[1], CLI_PARAM_PASSWORD[0], true, "GoodData password"),
new Option(CLI_PARAM_HOST[1], CLI_PARAM_HOST[0], true, "GoodData host"),
new Option(CLI_PARAM_FTP_HOST[1], CLI_PARAM_FTP_HOST[0], true, "GoodData FTP host"),
new Option(CLI_PARAM_PROJECT[1], CLI_PARAM_PROJECT[0], true, "GoodData project identifier (a string like nszfbgkr75otujmc4smtl6rf5pnmz9yl)"),
new Option(CLI_PARAM_BACKEND[1], CLI_PARAM_BACKEND[0], true, "Database backend DERBY or MYSQL"),
new Option(CLI_PARAM_DB_USERNAME[1], CLI_PARAM_DB_USERNAME[0], true, "Database backend username (not required for the local Derby SQL)"),
new Option(CLI_PARAM_DB_PASSWORD[1], CLI_PARAM_DB_PASSWORD[0], true, "Database backend password (not required for the local Derby SQL)"),
new Option(CLI_PARAM_PROTO[1], CLI_PARAM_PROTO[0], true, "HTTP or HTTPS"),
new Option(CLI_PARAM_EXECUTE[1], CLI_PARAM_EXECUTE[0], true, "Commands and params to execute before the commands in provided files")
};
private CliParams cliParams = null;
private Connector[] connectors = null;
private ProcessingContext context = new ProcessingContext();
private boolean finishedSucessfuly = false;
private static long LOCK_EXPIRATION_TIME = 1000 * 3600; // 1 hour
private GdcDI(CommandLine ln, Properties defaults) {
try {
cliParams = parse(ln, defaults);
cliParams.setHttpConfig(new NamePasswordConfiguration(
cliParams.get(CLI_PARAM_PROTO[0]), cliParams.get(CLI_PARAM_HOST[0]),
cliParams.get(CLI_PARAM_USERNAME[0]), cliParams.get(CLI_PARAM_PASSWORD[0])));
cliParams.setFtpConfig(new NamePasswordConfiguration(
cliParams.get(CLI_PARAM_PROTO[0]), cliParams.get(CLI_PARAM_FTP_HOST[0]),
cliParams.get(CLI_PARAM_USERNAME[0]), cliParams.get(CLI_PARAM_PASSWORD[0])));
ConnectorBackend backend = null;
try {
backend = instantiateConnectorBackend();
connectors = instantiateConnectors(backend);
String execute = cliParams.get(CLI_PARAM_EXECUTE[0]);
String scripts = cliParams.get(CLI_PARAM_SCRIPT);
if(execute!= null && scripts != null && execute.length()>0 && scripts.length()>0) {
throw new InvalidArgumentException("You can't execute a script and use the -e command line parameter at the same time.");
}
if(execute!= null && execute.length() > 0) {
l.debug("Executing arg="+execute);
execute(execute);
}
if(scripts!= null && scripts.length() > 0) {
String[] sas = scripts.split(",");
for(String script : sas) {
l.debug("Executing file="+script);
execute(new File(script));
}
}
finishedSucessfuly = true;
} finally {
if (backend != null) {
backend.close();
}
}
}
catch (InvalidArgumentException e) {
l.error("Invalid command line argument: "+e.getMessage());
Throwable c = e.getCause();
while(c!=null) {
l.error("Caused by: "+c.getMessage());
c = c.getCause();
}
l.debug("Invalid command line argument:",e);
l.info(commandsHelp());
}
catch (InvalidCommandException e) {
l.error("Invalid command: "+e.getMessage());
Throwable c = e.getCause();
while(c!=null) {
l.error("Caused by: "+c.getMessage());
c = c.getCause();
}
l.debug("Invalid command.",e);
}
catch (InvalidParameterException e) {
l.error("Invalid command parameter: "+e.getMessage());
Throwable c = e.getCause();
while(c!=null) {
l.error("Caused by: "+c.getMessage());
c = c.getCause();
}
l.debug("Invalid command parameter.",e);
}
catch (SfdcException e) {
l.error("Error communicating with SalesForce: "+e.getMessage());
Throwable c = e.getCause();
while(c!=null) {
l.error("Caused by: "+c.getMessage());
c = c.getCause();
}
l.debug("Error communicating with SalesForce.",e);
}
catch (ProcessingException e) {
l.error("Error processing command: "+e.getMessage());
Throwable c = e.getCause();
while(c!=null) {
l.error("Caused by: "+c.getMessage());
c = c.getCause();
}
l.debug("Error processing command.",e);
}
catch (ModelException e) {
l.error("Model issue: "+e.getMessage());
Throwable c = e.getCause();
while(c!=null) {
l.error("Caused by: "+c.getMessage());
c = c.getCause();
}
l.debug("Model issue.",e);
}
catch (GdcLoginException e) {
l.error("Error logging to GoodData. Please check your GoodData username and password: "+e.getMessage());
Throwable c = e.getCause();
while(c!=null) {
l.error("Caused by: "+c.getMessage());
c = c.getCause();
}
l.debug("Error logging to GoodData. Please check your GoodData username and password.",e);
}
catch (IOException e) {
l.error("Encountered an IO problem. Please check that all files that you use in your command line arguments and commands exist. More info: '"+e.getMessage()+"'");
Throwable c = e.getCause();
while(c!=null) {
l.error("Caused by: "+c.getMessage());
c = c.getCause();
}
l.debug("Encountered an IO problem. Please check that all files that you use in your command line arguments and commands exist. More info: '"+e.getMessage()+"'",e);
}
catch (InternalErrorException e) {
Throwable c = e.getCause();
if( c != null && c instanceof SQLException) {
l.error("Error extracting data. Can't process the incoming data. Please check the CSV file " +
"separator and consistency (same number of columns in each row). Also, please make sure " +
"that the number of columns in your XML config file matches the number of rows in your " +
"data source. Make sure that your file is readable by other users (particularly the mysql user). " +
"More info: '"+c.getMessage()+"'");
l.debug("Error extracting data. Can't process the incoming data. Please check the CSV file " +
"separator and consistency (same number of columns in each row). Also, please make sure " +
"that the number of columns in your XML config file matches the number of rows in your " +
"data source. Make sure that your file is readable by other users (particularly the mysql user). " +
"More info: '"+c.getMessage()+"'",c);
}
else {
l.error("Internal error: "+e.getMessage());
c = e.getCause();
while(c!=null) {
l.error("Caused by: "+c.getMessage());
c = c.getCause();
}
l.debug("REST API invocation error: ",e);
}
}
catch (HttpMethodException e) {
l.error("Error executing GoodData REST API: "+e.getMessage());
Throwable c = e.getCause();
while(c!=null) {
l.error("Caused by: "+c.getMessage());
c = c.getCause();
}
l.debug("Error executing GoodData REST API.",e);
}
catch (GdcRestApiException e) {
l.error("REST API invocation error: "+e.getMessage());
Throwable c = e.getCause();
while(c!=null) {
l.error("Caused by: "+c.getMessage());
c = c.getCause();
}
l.debug("REST API invocation error: ", e);
}
catch (GdcException e) {
l.error("Unrecognized error: "+e.getMessage());
Throwable c = e.getCause();
while(c!=null) {
l.error("Caused by: "+c.getMessage());
c = c.getCause();
}
l.debug("Unrecognized error: ",e);
}
}
/**
* Returns all cli options
* @return all cli options
*/
public static Options getOptions() {
Options ops = new Options();
for( Option o : mandatoryOptions)
ops.addOption(o);
for( Option o : optionalOptions)
ops.addOption(o);
return ops;
}
/**
* Parse and validate the cli arguments
* @param ln parsed command line
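     * @param defaults default values for the command line options, loaded from the gdi.properties file when it is present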
* @return parsed cli parameters wrapped in the CliParams
* @throws InvalidArgumentException in case of nonexistent or incorrect cli args
*/
protected CliParams parse(CommandLine ln, Properties defaults) throws InvalidArgumentException {
l.debug("Parsing cli "+ln);
CliParams cp = new CliParams();
for( Option o : mandatoryOptions) {
String name = o.getLongOpt();
if (ln.hasOption(name))
cp.put(name,ln.getOptionValue(name));
else if (defaults.getProperty(name) != null) {
cp.put(name, defaults.getProperty(name));
} else {
throw new InvalidArgumentException("Missing the '"+name+"' commandline parameter.");
}
}
for( Option o : optionalOptions) {
String name = o.getLongOpt();
if (ln.hasOption(name)) {
cp.put(name,ln.getOptionValue(name));
} else if (defaults.getProperty(name) != null) {
cp.put(name, defaults.getProperty(name));
}
}
// use default host if there is no host in the CLI params
if(!cp.containsKey(CLI_PARAM_HOST[0])) {
cp.put(CLI_PARAM_HOST[0], Defaults.DEFAULT_HOST);
}
l.debug("Using host "+cp.get(CLI_PARAM_HOST[0]));
        // create the default FTP host if there is no FTP host in the CLI params
if(!cp.containsKey(CLI_PARAM_FTP_HOST[0])) {
String[] hcs = cp.get(CLI_PARAM_HOST[0]).split("\\.");
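            // derive the FTP host from the API host: append the FTP service suffix to the first host name component and keep the remaining domain components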
if(hcs != null && hcs.length > 0) {
String ftpHost = "";
for(int i=0; i<hcs.length; i++) {
if(i>0)
ftpHost += "." + hcs[i];
else
ftpHost = hcs[i] + N.FTP_SRV_SUFFIX;
}
cp.put(CLI_PARAM_FTP_HOST[0],ftpHost);
}
else {
throw new IllegalArgumentException("Invalid format of the GoodData REST API host: " +
cp.get(CLI_PARAM_HOST[0]));
}
}
l.debug("Using FTP host "+cp.get(CLI_PARAM_FTP_HOST[0]));
        // use the default protocol if there is no protocol in the CLI params
if(!cp.containsKey(CLI_PARAM_PROTO[0])) {
cp.put(CLI_PARAM_PROTO[0], Defaults.DEFAULT_PROTO);
}
else {
String proto = ln.getOptionValue(CLI_PARAM_PROTO[0]).toLowerCase();
if(!"http".equalsIgnoreCase(proto) && !"https".equalsIgnoreCase(proto)) {
throw new InvalidArgumentException("Invalid '"+CLI_PARAM_PROTO[0]+"' parameter. Use HTTP or HTTPS.");
}
cp.put(CLI_PARAM_PROTO[0], proto);
}
l.debug("Using protocol "+cp.get(CLI_PARAM_PROTO[0]));
        // use the default backend if there is no backend in the CLI params
if(!cp.containsKey(CLI_PARAM_BACKEND[0])) {
cp.put(CLI_PARAM_BACKEND[0], Defaults.DEFAULT_BACKEND);
}
else {
String b = cp.get(CLI_PARAM_BACKEND[0]).toLowerCase();
if(!"mysql".equalsIgnoreCase(b) && !"derby".equalsIgnoreCase(b))
b = "derby";
cp.put(CLI_PARAM_BACKEND[0], b);
}
l.debug("Using backend "+cp.get(CLI_PARAM_BACKEND[0]));
if (ln.getArgs().length == 0 && !ln.hasOption("execute")) {
throw new InvalidArgumentException("No command has been given, quitting.");
}
String scripts = "";
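        // join the remaining (non-option) command line arguments into a comma-separated list of script files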
for (final String arg : ln.getArgs()) {
if(scripts.length()>0)
scripts += ","+arg;
else
scripts += arg;
}
cp.put(CLI_PARAM_SCRIPT, scripts);
return cp;
}
/**
* Executes the commands in String
     * @param commandsStr commands string
*/
public void execute(final String commandsStr) {
List<Command> cmds = new ArrayList<Command>();
cmds.addAll(parseCmd(commandsStr));
for(Command command : cmds) {
boolean processed = false;
for(int i=0; i<connectors.length && !processed; i++) {
processed = connectors[i].processCommand(command, cliParams, context);
}
if(!processed)
this.processCommand(command, cliParams, context);
}
}
/**
* Executes the commands in file
* @param scriptFile file with commands
* @throws IOException in case of an IO issue
*/
public void execute(final File scriptFile) throws IOException {
List<Command> cmds = new ArrayList<Command>();
cmds.addAll(parseCmd(FileUtil.readStringFromFile(scriptFile.getAbsolutePath())));
for(Command command : cmds) {
boolean processed = false;
for(int i=0; i<connectors.length && !processed; i++) {
processed = connectors[i].processCommand(command, cliParams, context);
}
if(!processed)
processed = this.processCommand(command, cliParams, context);
if(!processed)
throw new InvalidCommandException("Unknown command '"+command.getCommand()+"'");
}
}
/**
* Returns the help for commands
* @return help text
*/
public static String commandsHelp() {
try {
final InputStream is = CliParams.class.getResourceAsStream("/com/gooddata/processor/COMMANDS.txt");
if (is == null)
throw new IOException();
return FileUtil.readStringFromStream(is);
} catch (IOException e) {
l.error("Could not read com/gooddata/processor/COMMANDS.txt");
}
return "";
}
private static boolean checkJavaVersion() {
String version = System.getProperty("java.version");
if(version.startsWith("1.6") || version.startsWith("1.5"))
return true;
l.error("You're running Java "+version+". Please use Java 1.5 or higher for running this tool. " +
"Please refer to http://java.sun.com/javase/downloads/index.jsp for the Java 6 installation.");
        throw new InternalErrorException("You're running Java "+version+". Please use Java 1.5 or higher for running this tool. " +
"Please refer to http://java.sun.com/javase/downloads/index.jsp for the Java 6 installation.");
}
/**
* The main CLI processor
* @param args command line argument
*/
public static void main(String[] args) {
checkJavaVersion();
PropertyConfigurator.configure(System.getProperty("log4j.configuration"));
Properties defaults = loadDefaults();
try {
Options o = getOptions();
CommandLineParser parser = new GnuParser();
CommandLine cmdline = parser.parse(o, args);
GdcDI gdi = new GdcDI(cmdline, defaults);
if (!gdi.finishedSucessfuly) {
System.exit(1);
}
} catch (org.apache.commons.cli.ParseException e) {
l.error("Error parsing command line parameters: "+e.getMessage());
l.debug("Error parsing command line parameters",e);
}
}
/**
* Parses the commands
* @param cmd commands string
* @return array of commands
* @throws InvalidCommandException in case there is an invalid command
*/
protected static List<Command> parseCmd(String cmd) throws InvalidCommandException {
l.debug("Parsing comands: "+cmd);
try {
if(cmd != null && cmd.length()>0) {
Reader r = new StringReader(cmd);
DIScriptParser parser = new DIScriptParser(r);
List<Command> commands = parser.parse();
l.debug("Running "+commands.size()+" commands.");
for(Command c : commands) {
l.debug("Command="+c.getCommand()+" params="+c.getParameters());
}
return commands;
}
}
catch(ParseException e) {
throw new InvalidCommandException("Can't parse command '" + cmd + "'");
}
throw new InvalidCommandException("Can't parse command (empty command).");
}
/**
* {@inheritDoc}
*/
public boolean processCommand(Command c, CliParams cli, ProcessingContext ctx) throws ProcessingException {
l.debug("Processing command "+c.getCommand());
try {
            // take project id from command line, may be overridden in the script
if (cliParams.get(CLI_PARAM_PROJECT[0]) != null) {
ctx.setProjectId(cliParams.get(CLI_PARAM_PROJECT[0]));
}
if(c.match("CreateProject")) {
createProject(c, cli, ctx);
}
else if(c.match("DropProject")) {
dropProject(c, cli, ctx);
}
else if(c.match("OpenProject")) {
ctx.setProjectId(c.getParamMandatory("id"));
l.debug("Opened project id="+ctx.getProjectId());
}
else if(c.match("StoreProject")) {
storeProject(c, cli, ctx);
}
else if(c.match("RetrieveProject")) {
retrieveProject(c, cli, ctx);
}
else if(c.match( "Lock")) {
lock(c, cli, ctx);
}
else if(c.match("getReports")) {
getReports(c, cli, ctx);
}
else if(c.match("ExecuteReports")) {
executeReports(c, cli, ctx);
}
else {
l.debug("No match command "+c.getCommand());
return false;
}
}
catch (IOException e) {
l.debug("Processing command "+c.getCommand()+" failed",e);
throw new ProcessingException(e);
}
catch (InterruptedException e) {
l.debug("Processing command "+c.getCommand()+" failed",e);
throw new ProcessingException(e);
}
l.debug("Processed command "+c.getCommand());
return true;
}
/**
* Create new project command processor
* @param c command
* @param p cli parameters
* @param ctx current context
*/
private void createProject(Command c, CliParams p, ProcessingContext ctx) {
String name = c.getParamMandatory("name");
ctx.setProjectId(ctx.getRestApi(p).createProject(name, name));
String pid = ctx.getProjectId();
l.info("Project id = '"+pid+"' created.");
}
/**
* Drop project command processor
* @param c command
* @param p cli parameters
* @param ctx current context
*/
private void dropProject(Command c, CliParams p, ProcessingContext ctx) {
String id = c.getParamMandatory("id");
ctx.getRestApi(p).dropProject(id);
l.info("Project id = '"+id+"' dropped.");
}
/**
* Enumerate reports
* @param c command
* @param p cli parameters
* @param ctx current context
*/
private void getReports(Command c, CliParams p, ProcessingContext ctx) throws IOException {
String pid = ctx.getProjectId();
String fileName = c.getParamMandatory("fileName");
List<String> uris = ctx.getRestApi(p).enumerateReports(pid);
String result = "";
for(String uri : uris) {
if(result.length() > 0)
result += "\n" + uri;
else
result += uri;
}
FileUtil.writeStringToFile(result, fileName);
}
/**
     * Execute reports
* @param c command
* @param p cli parameters
* @param ctx current context
*/
private void executeReports(Command c, CliParams p, ProcessingContext ctx) throws IOException, InterruptedException {
String pid = ctx.getProjectId();
String fileName = c.getParamMandatory("fileName");
String result = FileUtil.readStringFromFile(fileName).trim();
if(result != null && result.length()>0) {
String[] uris = result.split("\n");
for(String uri : uris) {
String defUri = ctx.getRestApi(p).getReportDefinition(uri.trim());
l.info("Executing report uri="+defUri);
String task = ctx.getRestApi(p).executeReportDefinition(defUri.trim());
boolean finished = false;
do {
finished = ctx.getRestApi(p).getReportExecutionStatus(task);
if(!finished)
Thread.sleep(1500);
l.info("Checking report " +defUri+ " execution finished status="+finished);
}
while(!finished);
l.info("Report " +defUri+ " execution finished.");
}
}
else {
throw new IOException("There are no reports to execute.");
}
}
/**
* Store project command processor
* @param c command
* @param p cli parameters
* @param ctx current context
* @throws IOException in case of an IO issue
*/
private void storeProject(Command c, CliParams p, ProcessingContext ctx) throws IOException {
String fileName = c.getParamMandatory("fileName");
String pid = ctx.getProjectId();
FileUtil.writeStringToFile(pid, fileName);
l.debug("Stored project id="+pid+" to "+fileName);
}
/**
* Retrieve project command processor
* @param c command
* @param p cli parameters
* @param ctx current context
* @throws IOException in case of an IO issue
*/
private void retrieveProject(Command c, CliParams p, ProcessingContext ctx) throws IOException {
String fileName = c.getParamMandatory("fileName");
ctx.setProjectId(FileUtil.readStringFromFile(fileName).trim());
l.debug("Retrieved project id="+ctx.getProjectId()+" from "+fileName);
}
/**
* Lock project command processor
* @param c command
* @param p cli parameters
* @param ctx current context
* @throws IOException in case of an IO issue
*/
private void lock(Command c, CliParams p, ProcessingContext ctx) throws IOException {
final String path = c.getParamMandatory( "path");
final File lock = new File(path);
if (!lock.createNewFile()) {
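            // the lock file already exists; a lock file older than LOCK_EXPIRATION_TIME is considered stale and is deleted before retrying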
if (System.currentTimeMillis() - lock.lastModified() > LOCK_EXPIRATION_TIME) {
lock.delete();
if (!lock.exists()) {
lock(c, p, ctx); // retry
}
}
l.debug("A concurrent process found using the " + path + " lock file.");
throw new IOException("A concurrent process found using the " + path + " lock file.");
}
lock.deleteOnExit();
}
private ConnectorBackend instantiateConnectorBackend() throws IOException {
String b = cliParams.get(CLI_PARAM_BACKEND[0]);
final ConnectorBackend backend;
if("mysql".equalsIgnoreCase(b))
backend = MySqlConnectorBackend.create(cliParams.get(CLI_PARAM_DB_USERNAME[0]),
cliParams.get(CLI_PARAM_DB_PASSWORD[0]));
else if("derby".equalsIgnoreCase(b))
backend = DerbyConnectorBackend.create();
else
throw new IllegalStateException("Invalid backed '" + b + "'");
return backend;
}
/**
* Instantiate all known connectors
* TODO: this should be automated
* @return array of all active connectors
* @throws IOException in case of IO issues
*/
private Connector[] instantiateConnectors(ConnectorBackend backend) throws IOException {
return new Connector[] {
CsvConnector.createConnector(backend),
GaConnector.createConnector(backend),
SfdcConnector.createConnector(backend),
JdbcConnector.createConnector(backend),
DateDimensionConnector.createConnector()
};
}
/**
* Loads default values of common parameters from a properties file searching
* the working directory and user's home.
* @return default configuration
*/
private static Properties loadDefaults() {
final String[] dirs = new String[]{ "user.dir", "user.home" };
final Properties props = new Properties();
for (final String d : dirs) {
String path = System.getProperty(d) + File.separator + DEFAULT_PROPERTIES;
File f = new File(path);
if (f.exists() && f.canRead()) {
try {
FileInputStream is = new FileInputStream(f);
props.load(is);
return props;
} catch (IOException e) {
l.warn("Readable gdi configuration '" + f.getAbsolutePath() + "' found be error occurred reading it.");
l.debug("Error reading gdi configuration '" + f.getAbsolutePath() + "': ", e);
}
}
}
return props;
}
} | cli/src/main/java/com/gooddata/processor/GdcDI.java | /*
* Copyright (c) 2009, GoodData Corporation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided
* that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this list of conditions and
* the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice, this list of conditions
* and the following disclaimer in the documentation and/or other materials provided with the distribution.
* * Neither the name of the GoodData Corporation nor the names of its contributors may be used to endorse
* or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
* OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
* AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.gooddata.processor;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.io.StringReader;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;
import com.gooddata.connector.Connector;
import com.gooddata.connector.CsvConnector;
import com.gooddata.connector.DateDimensionConnector;
import com.gooddata.connector.GaConnector;
import com.gooddata.connector.JdbcConnector;
import com.gooddata.connector.SfdcConnector;
import com.gooddata.connector.backend.ConnectorBackend;
import com.gooddata.connector.backend.DerbyConnectorBackend;
import com.gooddata.connector.backend.MySqlConnectorBackend;
import com.gooddata.exception.GdcException;
import com.gooddata.exception.GdcLoginException;
import com.gooddata.exception.GdcRestApiException;
import com.gooddata.exception.HttpMethodException;
import com.gooddata.exception.InternalErrorException;
import com.gooddata.exception.InvalidArgumentException;
import com.gooddata.exception.InvalidCommandException;
import com.gooddata.exception.InvalidParameterException;
import com.gooddata.exception.ModelException;
import com.gooddata.exception.ProcessingException;
import com.gooddata.exception.SfdcException;
import com.gooddata.integration.rest.configuration.NamePasswordConfiguration;
import com.gooddata.naming.N;
import com.gooddata.processor.parser.DIScriptParser;
import com.gooddata.processor.parser.ParseException;
import com.gooddata.util.FileUtil;
/**
* The GoodData Data Integration CLI processor.
*
* @author jiri.zaloudek
* @author Zdenek Svoboda <[email protected]>
* @version 1.0
*/
public class GdcDI implements Executor {
private static Logger l = Logger.getLogger(GdcDI.class);
//Options data
public static String[] CLI_PARAM_USERNAME = {"username","u"};
public static String[] CLI_PARAM_PASSWORD = {"password","p"};
public static String[] CLI_PARAM_HOST = {"host","h"};
public static String[] CLI_PARAM_FTP_HOST = {"ftphost","f"};
public static String[] CLI_PARAM_PROJECT = {"project","i"};
public static String[] CLI_PARAM_BACKEND = {"backend","b"};
public static String[] CLI_PARAM_DB_USERNAME = {"dbusername","d"};
public static String[] CLI_PARAM_DB_PASSWORD = {"dbpassword","c"};
public static String[] CLI_PARAM_PROTO = {"proto","t"};
public static String[] CLI_PARAM_EXECUTE = {"execute","e"};
public static String CLI_PARAM_SCRIPT = "script";
private static String DEFAULT_PROPERTIES = "gdi.properties";
// mandatory options
public static Option[] mandatoryOptions = { };
// optional options
public static Option[] optionalOptions = {
new Option(CLI_PARAM_USERNAME[1], CLI_PARAM_USERNAME[0], true, "GoodData username"),
new Option(CLI_PARAM_PASSWORD[1], CLI_PARAM_PASSWORD[0], true, "GoodData password"),
new Option(CLI_PARAM_HOST[1], CLI_PARAM_HOST[0], true, "GoodData host"),
new Option(CLI_PARAM_FTP_HOST[1], CLI_PARAM_FTP_HOST[0], true, "GoodData FTP host"),
new Option(CLI_PARAM_PROJECT[1], CLI_PARAM_PROJECT[0], true, "GoodData project identifier (a string like nszfbgkr75otujmc4smtl6rf5pnmz9yl)"),
new Option(CLI_PARAM_BACKEND[1], CLI_PARAM_BACKEND[0], true, "Database backend DERBY or MYSQL"),
new Option(CLI_PARAM_DB_USERNAME[1], CLI_PARAM_DB_USERNAME[0], true, "Database backend username (not required for the local Derby SQL)"),
new Option(CLI_PARAM_DB_PASSWORD[1], CLI_PARAM_DB_PASSWORD[0], true, "Database backend password (not required for the local Derby SQL)"),
new Option(CLI_PARAM_PROTO[1], CLI_PARAM_PROTO[0], true, "HTTP or HTTPS"),
new Option(CLI_PARAM_EXECUTE[1], CLI_PARAM_EXECUTE[0], true, "Commands and params to execute before the commands in provided files")
};
private CliParams cliParams = null;
private Connector[] connectors = null;
private ProcessingContext context = new ProcessingContext();
private static long LOCK_EXPIRATION_TIME = 1000 * 3600; // 1 hour
private GdcDI(CommandLine ln, Properties defaults) {
try {
cliParams = parse(ln, defaults);
cliParams.setHttpConfig(new NamePasswordConfiguration(
cliParams.get(CLI_PARAM_PROTO[0]), cliParams.get(CLI_PARAM_HOST[0]),
cliParams.get(CLI_PARAM_USERNAME[0]), cliParams.get(CLI_PARAM_PASSWORD[0])));
cliParams.setFtpConfig(new NamePasswordConfiguration(
cliParams.get(CLI_PARAM_PROTO[0]), cliParams.get(CLI_PARAM_FTP_HOST[0]),
cliParams.get(CLI_PARAM_USERNAME[0]), cliParams.get(CLI_PARAM_PASSWORD[0])));
ConnectorBackend backend = null;
try {
backend = instantiateConnectorBackend();
connectors = instantiateConnectors(backend);
String execute = cliParams.get(CLI_PARAM_EXECUTE[0]);
String scripts = cliParams.get(CLI_PARAM_SCRIPT);
if(execute!= null && scripts != null && execute.length()>0 && scripts.length()>0) {
throw new InvalidArgumentException("You can't execute a script and use the -e command line parameter at the same time.");
}
if(execute!= null && execute.length() > 0) {
l.debug("Executing arg="+execute);
execute(execute);
}
if(scripts!= null && scripts.length() > 0) {
String[] sas = scripts.split(",");
for(String script : sas) {
l.debug("Executing file="+script);
execute(new File(script));
}
}
} finally {
if (backend != null) {
backend.close();
}
}
}
catch (InvalidArgumentException e) {
l.error("Invalid command line argument: "+e.getMessage());
Throwable c = e.getCause();
while(c!=null) {
l.error("Caused by: "+c.getMessage());
c = c.getCause();
}
l.debug("Invalid command line argument:",e);
l.info(commandsHelp());
}
catch (InvalidCommandException e) {
l.error("Invalid command: "+e.getMessage());
Throwable c = e.getCause();
while(c!=null) {
l.error("Caused by: "+c.getMessage());
c = c.getCause();
}
l.debug("Invalid command.",e);
}
catch (InvalidParameterException e) {
l.error("Invalid command parameter: "+e.getMessage());
Throwable c = e.getCause();
while(c!=null) {
l.error("Caused by: "+c.getMessage());
c = c.getCause();
}
l.debug("Invalid command parameter.",e);
}
catch (SfdcException e) {
l.error("Error communicating with SalesForce: "+e.getMessage());
Throwable c = e.getCause();
while(c!=null) {
l.error("Caused by: "+c.getMessage());
c = c.getCause();
}
l.debug("Error communicating with SalesForce.",e);
}
catch (ProcessingException e) {
l.error("Error processing command: "+e.getMessage());
Throwable c = e.getCause();
while(c!=null) {
l.error("Caused by: "+c.getMessage());
c = c.getCause();
}
l.debug("Error processing command.",e);
}
catch (ModelException e) {
l.error("Model issue: "+e.getMessage());
Throwable c = e.getCause();
while(c!=null) {
l.error("Caused by: "+c.getMessage());
c = c.getCause();
}
l.debug("Model issue.",e);
}
catch (GdcLoginException e) {
l.error("Error logging to GoodData. Please check your GoodData username and password: "+e.getMessage());
Throwable c = e.getCause();
while(c!=null) {
l.error("Caused by: "+c.getMessage());
c = c.getCause();
}
l.debug("Error logging to GoodData. Please check your GoodData username and password.",e);
}
catch (IOException e) {
l.error("Encountered an IO problem. Please check that all files that you use in your command line arguments and commands exist. More info: '"+e.getMessage()+"'");
Throwable c = e.getCause();
while(c!=null) {
l.error("Caused by: "+c.getMessage());
c = c.getCause();
}
l.debug("Encountered an IO problem. Please check that all files that you use in your command line arguments and commands exist. More info: '"+e.getMessage()+"'",e);
}
catch (InternalErrorException e) {
Throwable c = e.getCause();
if( c != null && c instanceof SQLException) {
l.error("Error extracting data. Can't process the incoming data. Please check the CSV file " +
"separator and consistency (same number of columns in each row). Also, please make sure " +
"that the number of columns in your XML config file matches the number of rows in your " +
"data source. Make sure that your file is readable by other users (particularly the mysql user). " +
"More info: '"+c.getMessage()+"'");
l.debug("Error extracting data. Can't process the incoming data. Please check the CSV file " +
"separator and consistency (same number of columns in each row). Also, please make sure " +
"that the number of columns in your XML config file matches the number of rows in your " +
"data source. Make sure that your file is readable by other users (particularly the mysql user). " +
"More info: '"+c.getMessage()+"'",c);
}
else {
l.error("Internal error: "+e.getMessage());
c = e.getCause();
while(c!=null) {
l.error("Caused by: "+c.getMessage());
c = c.getCause();
}
l.debug("REST API invocation error: ",e);
}
}
catch (HttpMethodException e) {
l.error("Error executing GoodData REST API: "+e.getMessage());
Throwable c = e.getCause();
while(c!=null) {
l.error("Caused by: "+c.getMessage());
c = c.getCause();
}
l.debug("Error executing GoodData REST API.",e);
}
catch (GdcRestApiException e) {
l.error("REST API invocation error: "+e.getMessage());
Throwable c = e.getCause();
while(c!=null) {
l.error("Caused by: "+c.getMessage());
c = c.getCause();
}
l.debug("REST API invocation error: ", e);
}
catch (GdcException e) {
l.error("Unrecognized error: "+e.getMessage());
Throwable c = e.getCause();
while(c!=null) {
l.error("Caused by: "+c.getMessage());
c = c.getCause();
}
l.debug("Unrecognized error: ",e);
}
}
/**
* Returns all cli options
* @return all cli options
*/
public static Options getOptions() {
Options ops = new Options();
for( Option o : mandatoryOptions)
ops.addOption(o);
for( Option o : optionalOptions)
ops.addOption(o);
return ops;
}
/**
* Parse and validate the cli arguments
* @param ln parsed command line
* @return parsed cli parameters wrapped in the CliParams
* @throws InvalidArgumentException in case of nonexistent or incorrect cli args
*/
protected CliParams parse(CommandLine ln, Properties defaults) throws InvalidArgumentException {
l.debug("Parsing cli "+ln);
CliParams cp = new CliParams();
for( Option o : mandatoryOptions) {
String name = o.getLongOpt();
if (ln.hasOption(name))
cp.put(name,ln.getOptionValue(name));
else if (defaults.getProperty(name) != null) {
cp.put(name, defaults.getProperty(name));
} else {
throw new InvalidArgumentException("Missing the '"+name+"' commandline parameter.");
}
}
for( Option o : optionalOptions) {
String name = o.getLongOpt();
if (ln.hasOption(name)) {
cp.put(name,ln.getOptionValue(name));
} else if (defaults.getProperty(name) != null) {
cp.put(name, defaults.getProperty(name));
}
}
// use default host if there is no host in the CLI params
if(!cp.containsKey(CLI_PARAM_HOST[0])) {
cp.put(CLI_PARAM_HOST[0], Defaults.DEFAULT_HOST);
}
l.debug("Using host "+cp.get(CLI_PARAM_HOST[0]));
        // create the default FTP host if there is no FTP host in the CLI params
if(!cp.containsKey(CLI_PARAM_FTP_HOST[0])) {
String[] hcs = cp.get(CLI_PARAM_HOST[0]).split("\\.");
if(hcs != null && hcs.length > 0) {
String ftpHost = "";
for(int i=0; i<hcs.length; i++) {
if(i>0)
ftpHost += "." + hcs[i];
else
ftpHost = hcs[i] + N.FTP_SRV_SUFFIX;
}
cp.put(CLI_PARAM_FTP_HOST[0],ftpHost);
}
else {
throw new IllegalArgumentException("Invalid format of the GoodData REST API host: " +
cp.get(CLI_PARAM_HOST[0]));
}
}
l.debug("Using FTP host "+cp.get(CLI_PARAM_FTP_HOST[0]));
        // use the default protocol if there is no protocol in the CLI params
if(!cp.containsKey(CLI_PARAM_PROTO[0])) {
cp.put(CLI_PARAM_PROTO[0], Defaults.DEFAULT_PROTO);
}
else {
String proto = ln.getOptionValue(CLI_PARAM_PROTO[0]).toLowerCase();
if(!"http".equalsIgnoreCase(proto) && !"https".equalsIgnoreCase(proto)) {
throw new InvalidArgumentException("Invalid '"+CLI_PARAM_PROTO[0]+"' parameter. Use HTTP or HTTPS.");
}
cp.put(CLI_PARAM_PROTO[0], proto);
}
l.debug("Using protocol "+cp.get(CLI_PARAM_PROTO[0]));
        // use the default backend if there is no backend in the CLI params
if(!cp.containsKey(CLI_PARAM_BACKEND[0])) {
cp.put(CLI_PARAM_BACKEND[0], Defaults.DEFAULT_BACKEND);
}
else {
String b = cp.get(CLI_PARAM_BACKEND[0]).toLowerCase();
if(!"mysql".equalsIgnoreCase(b) && !"derby".equalsIgnoreCase(b))
b = "derby";
cp.put(CLI_PARAM_BACKEND[0], b);
}
l.debug("Using backend "+cp.get(CLI_PARAM_BACKEND[0]));
if (ln.getArgs().length == 0 && !ln.hasOption("execute")) {
throw new InvalidArgumentException("No command has been given, quitting.");
}
String scripts = "";
for (final String arg : ln.getArgs()) {
if(scripts.length()>0)
scripts += ","+arg;
else
scripts += arg;
}
cp.put(CLI_PARAM_SCRIPT, scripts);
return cp;
}
/**
* Executes the commands in String
     * @param commandsStr commands string
*/
public void execute(final String commandsStr) {
List<Command> cmds = new ArrayList<Command>();
cmds.addAll(parseCmd(commandsStr));
for(Command command : cmds) {
boolean processed = false;
for(int i=0; i<connectors.length && !processed; i++) {
processed = connectors[i].processCommand(command, cliParams, context);
}
if(!processed)
this.processCommand(command, cliParams, context);
}
}
/**
* Executes the commands in file
* @param scriptFile file with commands
* @throws IOException in case of an IO issue
*/
public void execute(final File scriptFile) throws IOException {
List<Command> cmds = new ArrayList<Command>();
cmds.addAll(parseCmd(FileUtil.readStringFromFile(scriptFile.getAbsolutePath())));
for(Command command : cmds) {
boolean processed = false;
for(int i=0; i<connectors.length && !processed; i++) {
processed = connectors[i].processCommand(command, cliParams, context);
}
if(!processed)
processed = this.processCommand(command, cliParams, context);
if(!processed)
throw new InvalidCommandException("Unknown command '"+command.getCommand()+"'");
}
}
/**
* Returns the help for commands
* @return help text
*/
public static String commandsHelp() {
try {
final InputStream is = CliParams.class.getResourceAsStream("/com/gooddata/processor/COMMANDS.txt");
if (is == null)
throw new IOException();
return FileUtil.readStringFromStream(is);
} catch (IOException e) {
l.error("Could not read com/gooddata/processor/COMMANDS.txt");
}
return "";
}
private static boolean checkJavaVersion() {
String version = System.getProperty("java.version");
if(version.startsWith("1.6") || version.startsWith("1.5"))
return true;
l.error("You're running Java "+version+". Please use Java 1.5 or higher for running this tool. " +
"Please refer to http://java.sun.com/javase/downloads/index.jsp for the Java 6 installation.");
        throw new InternalErrorException("You're running Java "+version+". Please use Java 1.5 or higher for running this tool. " +
"Please refer to http://java.sun.com/javase/downloads/index.jsp for the Java 6 installation.");
}
/**
* The main CLI processor
* @param args command line argument
*/
public static void main(String[] args) {
checkJavaVersion();
PropertyConfigurator.configure(System.getProperty("log4j.configuration"));
Properties defaults = loadDefaults();
try {
Options o = getOptions();
CommandLineParser parser = new GnuParser();
CommandLine cmdline = parser.parse(o, args);
new GdcDI(cmdline, defaults);
} catch (org.apache.commons.cli.ParseException e) {
l.error("Error parsing command line parameters: "+e.getMessage());
l.debug("Error parsing command line parameters",e);
}
}
/**
* Parses the commands
* @param cmd commands string
* @return array of commands
* @throws InvalidCommandException in case there is an invalid command
*/
protected static List<Command> parseCmd(String cmd) throws InvalidCommandException {
l.debug("Parsing comands: "+cmd);
try {
if(cmd != null && cmd.length()>0) {
Reader r = new StringReader(cmd);
DIScriptParser parser = new DIScriptParser(r);
List<Command> commands = parser.parse();
l.debug("Running "+commands.size()+" commands.");
for(Command c : commands) {
l.debug("Command="+c.getCommand()+" params="+c.getParameters());
}
return commands;
}
}
catch(ParseException e) {
throw new InvalidCommandException("Can't parse command '" + cmd + "'");
}
throw new InvalidCommandException("Can't parse command (empty command).");
}
/**
* {@inheritDoc}
*/
public boolean processCommand(Command c, CliParams cli, ProcessingContext ctx) throws ProcessingException {
l.debug("Processing command "+c.getCommand());
try {
// take project id from command line, may be override in the script
if (cliParams.get(CLI_PARAM_PROJECT[0]) != null) {
ctx.setProjectId(cliParams.get(CLI_PARAM_PROJECT[0]));
}
if(c.match("CreateProject")) {
createProject(c, cli, ctx);
}
else if(c.match("DropProject")) {
dropProject(c, cli, ctx);
}
else if(c.match("OpenProject")) {
ctx.setProjectId(c.getParamMandatory("id"));
l.debug("Opened project id="+ctx.getProjectId());
}
else if(c.match("StoreProject")) {
storeProject(c, cli, ctx);
}
else if(c.match("RetrieveProject")) {
retrieveProject(c, cli, ctx);
}
else if(c.match( "Lock")) {
lock(c, cli, ctx);
}
else if(c.match("getReports")) {
getReports(c, cli, ctx);
}
else if(c.match("ExecuteReports")) {
executeReports(c, cli, ctx);
}
else {
l.debug("No match command "+c.getCommand());
return false;
}
}
catch (IOException e) {
l.debug("Processing command "+c.getCommand()+" failed",e);
throw new ProcessingException(e);
}
catch (InterruptedException e) {
l.debug("Processing command "+c.getCommand()+" failed",e);
throw new ProcessingException(e);
}
l.debug("Processed command "+c.getCommand());
return true;
}
/**
* Create new project command processor
* @param c command
* @param p cli parameters
* @param ctx current context
*/
private void createProject(Command c, CliParams p, ProcessingContext ctx) {
String name = c.getParamMandatory("name");
ctx.setProjectId(ctx.getRestApi(p).createProject(name, name));
String pid = ctx.getProjectId();
l.info("Project id = '"+pid+"' created.");
}
/**
* Drop project command processor
* @param c command
* @param p cli parameters
* @param ctx current context
*/
private void dropProject(Command c, CliParams p, ProcessingContext ctx) {
String id = c.getParamMandatory("id");
ctx.getRestApi(p).dropProject(id);
l.info("Project id = '"+id+"' dropped.");
}
/**
* Enumerate reports
* @param c command
* @param p cli parameters
* @param ctx current context
*/
private void getReports(Command c, CliParams p, ProcessingContext ctx) throws IOException {
String pid = ctx.getProjectId();
String fileName = c.getParamMandatory("fileName");
List<String> uris = ctx.getRestApi(p).enumerateReports(pid);
String result = "";
for(String uri : uris) {
if(result.length() > 0)
result += "\n" + uri;
else
result += uri;
}
FileUtil.writeStringToFile(result, fileName);
}
/**
     * Execute reports
* @param c command
* @param p cli parameters
* @param ctx current context
*/
private void executeReports(Command c, CliParams p, ProcessingContext ctx) throws IOException, InterruptedException {
String pid = ctx.getProjectId();
String fileName = c.getParamMandatory("fileName");
String result = FileUtil.readStringFromFile(fileName).trim();
if(result != null && result.length()>0) {
String[] uris = result.split("\n");
for(String uri : uris) {
String defUri = ctx.getRestApi(p).getReportDefinition(uri.trim());
l.info("Executing report uri="+defUri);
String task = ctx.getRestApi(p).executeReportDefinition(defUri.trim());
boolean finished = false;
do {
finished = ctx.getRestApi(p).getReportExecutionStatus(task);
if(!finished)
Thread.sleep(1500);
l.info("Checking report " +defUri+ " execution finished status="+finished);
}
while(!finished);
l.info("Report " +defUri+ " execution finished.");
}
}
else {
throw new IOException("There are no reports to execute.");
}
}
/**
* Store project command processor
* @param c command
* @param p cli parameters
* @param ctx current context
* @throws IOException in case of an IO issue
*/
private void storeProject(Command c, CliParams p, ProcessingContext ctx) throws IOException {
String fileName = c.getParamMandatory("fileName");
String pid = ctx.getProjectId();
FileUtil.writeStringToFile(pid, fileName);
l.debug("Stored project id="+pid+" to "+fileName);
}
/**
* Retrieve project command processor
* @param c command
* @param p cli parameters
* @param ctx current context
* @throws IOException in case of an IO issue
*/
private void retrieveProject(Command c, CliParams p, ProcessingContext ctx) throws IOException {
String fileName = c.getParamMandatory("fileName");
ctx.setProjectId(FileUtil.readStringFromFile(fileName).trim());
l.debug("Retrieved project id="+ctx.getProjectId()+" from "+fileName);
}
/**
* Lock project command processor
* @param c command
* @param p cli parameters
* @param ctx current context
* @throws IOException in case of an IO issue
*/
private void lock(Command c, CliParams p, ProcessingContext ctx) throws IOException {
final String path = c.getParamMandatory( "path");
final File lock = new File(path);
if (!lock.createNewFile()) {
if (System.currentTimeMillis() - lock.lastModified() > LOCK_EXPIRATION_TIME) {
lock.delete();
if (!lock.exists()) {
lock(c, p, ctx); // retry
}
}
l.debug("A concurrent process found using the " + path + " lock file.");
throw new IOException("A concurrent process found using the " + path + " lock file.");
}
lock.deleteOnExit();
}
private ConnectorBackend instantiateConnectorBackend() throws IOException {
String b = cliParams.get(CLI_PARAM_BACKEND[0]);
final ConnectorBackend backend;
if("mysql".equalsIgnoreCase(b))
backend = MySqlConnectorBackend.create(cliParams.get(CLI_PARAM_DB_USERNAME[0]),
cliParams.get(CLI_PARAM_DB_PASSWORD[0]));
else if("derby".equalsIgnoreCase(b))
backend = DerbyConnectorBackend.create();
else
throw new IllegalStateException("Invalid backed '" + b + "'");
return backend;
}
/**
* Instantiate all known connectors
* TODO: this should be automated
* @return array of all active connectors
* @throws IOException in case of IO issues
*/
private Connector[] instantiateConnectors(ConnectorBackend backend) throws IOException {
return new Connector[] {
CsvConnector.createConnector(backend),
GaConnector.createConnector(backend),
SfdcConnector.createConnector(backend),
JdbcConnector.createConnector(backend),
DateDimensionConnector.createConnector()
};
}
/**
* Loads default values of common parameters from a properties file searching
* the working directory and user's home.
* @return default configuration
*/
private static Properties loadDefaults() {
final String[] dirs = new String[]{ "user.dir", "user.home" };
final Properties props = new Properties();
for (final String d : dirs) {
String path = System.getProperty(d) + File.separator + DEFAULT_PROPERTIES;
File f = new File(path);
if (f.exists() && f.canRead()) {
try {
FileInputStream is = new FileInputStream(f);
props.load(is);
return props;
} catch (IOException e) {
l.warn("Readable gdi configuration '" + f.getAbsolutePath() + "' found be error occurred reading it.");
l.debug("Error reading gdi configuration '" + f.getAbsolutePath() + "': ", e);
}
}
}
return props;
}
} | return a non-zero status code on error
| cli/src/main/java/com/gooddata/processor/GdcDI.java | return a non-zero status code on error | <ide><path>li/src/main/java/com/gooddata/processor/GdcDI.java
<ide> private Connector[] connectors = null;
<ide>
<ide> private ProcessingContext context = new ProcessingContext();
<add>
<add> private boolean finishedSucessfuly = false;
<ide>
<ide> private static long LOCK_EXPIRATION_TIME = 1000 * 3600; // 1 hour
<ide>
<ide> execute(new File(script));
<ide> }
<ide> }
<add> finishedSucessfuly = true;
<ide> } finally {
<ide> if (backend != null) {
<ide> backend.close();
<ide> Options o = getOptions();
<ide> CommandLineParser parser = new GnuParser();
<ide> CommandLine cmdline = parser.parse(o, args);
<del> new GdcDI(cmdline, defaults);
<add> GdcDI gdi = new GdcDI(cmdline, defaults);
<add> if (!gdi.finishedSucessfuly) {
<add> System.exit(1);
<add> }
<ide> } catch (org.apache.commons.cli.ParseException e) {
<ide> l.error("Error parsing command line parameters: "+e.getMessage());
<ide> l.debug("Error parsing command line parameters",e); |
|
JavaScript | bsd-3-clause | 91e1a90a347bab71471d2368ee8a35b9609d23a8 | 0 | kbarbounakis/themost,themost-framework/themost,kbarbounakis/themost,kbarbounakis/themost | /**
* @license
* MOST Web Framework 2.0 Codename Blueshift
* Copyright (c) 2017, THEMOST LP All rights reserved
*
* Use of this source code is governed by an BSD-3-Clause license that can be
* found in the LICENSE file at https://themost.io/license
*/
var formidable = require('formidable');
var _ = require('lodash');
var TraceUtils = require('@themost/common/utils').TraceUtils;
/**
* @class UnknownValue
* @constructor
*/
function UnknownValue() {
//
}
UnknownValue.prototype.valueOf = function() { return null; };
UnknownValue.prototype.toJSON = function() { return null; };
UnknownValue.DateTimeRegex = /^\d{4}-([0]\d|1[0-2])-([0-2]\d|3[01])(?:[T ](\d+):(\d+)(?::(\d+)(?:\.(\d+))?)?)?(?:Z(-?\d*))?([+-](\d+):(\d+))?$/;
UnknownValue.BooleanTrueRegex = /^true$/i;
UnknownValue.BooleanFalseRegex = /^false$/i;
UnknownValue.NullRegex = /^null$/i;
UnknownValue.UndefinedRegex = /^undefined$/i;
UnknownValue.IntegerRegex =/^[-+]?\d+$/;
UnknownValue.FloatRegex =/^[+-]?\d+(\.\d+)?$/;
/**
* @class UnknownPropertyDescriptor
* @constructor
*/
function UnknownPropertyDescriptor(obj, name) {
Object.defineProperty(this, 'value', { configurable:false, enumerable:true, get: function() { return obj[name]; }, set: function(value) { obj[name]=value; } });
Object.defineProperty(this, 'name', { configurable:false, enumerable:true, get: function() { return name; } });
}
/**
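 * Converts a string value into a typed value (boolean, null, integer, float or Date) when it matches one of the known patterns; otherwise the original value is returned.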
* @param {string} value
*/
UnknownValue.convert = function(value) {
var result;
if ((typeof value === 'string'))
{
if (value.length===0) {
result = value
}
if (value.match(UnknownValue.BooleanTrueRegex)) {
result = true;
}
else if (value.match(UnknownValue.BooleanFalseRegex)) {
result = false;
}
else if (value.match(UnknownValue.NullRegex) || value.match(UnknownValue.UndefinedRegex)) {
result = null;
}
else if (value.match(UnknownValue.IntegerRegex)) {
result = parseInt(value);
}
else if (value.match(UnknownValue.FloatRegex)) {
result = parseFloat(value);
}
else if (value.match(UnknownValue.DateTimeRegex)) {
result = new Date(Date.parse(value));
}
else {
result = value;
}
}
else {
result = value;
}
return result;
};
/**
* @class PostHandler
* @constructor
* @augments HttpHandler
*/
function PostHandler() {
}
/**
*
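 * Recursively assigns a value to the target object by following the bracket notation of the given expression,
 * e.g. extend({}, 'user[name]', 'user1') yields { user: { name: 'user1' } }.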
* @param {*} origin
* @param {string} expr
* @param {string} value
* @param {*=} options
* @returns {*}
* @private
*/
function extend(origin, expr, value, options) {
options = options || { convertValues:false };
//find base notation
var match = /(^\w+)\[/.exec(expr), name, descriptor, expr1;
if (match) {
//get property name
name = match[1];
//validate array property
if (/^\d+$/g.test(name)) {
//property is an array
if (!_.isArray(origin.value))
origin.value = [];
// get new expression
expr1 = expr.substr(match.index + match[1].length);
extend(origin, expr1, value, options);
}
else {
//set property value (unknown)
origin[name] = origin[name] || new UnknownValue();
descriptor = new UnknownPropertyDescriptor(origin, name);
// get new expression
expr1 = expr.substr(match.index + match[1].length);
extend(descriptor, expr1, value, options);
}
}
else if (expr.indexOf('[')===0) {
//get property
var re = /\[(.*?)\]/g;
match = re.exec(expr);
if (match) {
name = match[1];
// get new expression
expr1 = expr.substr(match.index + match[0].length);
if (/^\d+$/g.test(name)) {
//property is an array
if (!_.isArray(origin.value))
origin.value = [];
}
if (expr1.length===0) {
if (origin.value instanceof UnknownValue) {
origin.value = {};
}
var typedValue;
//convert string value
if ((typeof value === 'string') && options.convertValues) {
typedValue = UnknownValue.convert(value);
}
else {
typedValue = value;
}
if (_.isArray(origin.value))
origin.value.push(typedValue);
else
origin.value[name] = typedValue;
}
else {
if (origin.value instanceof UnknownValue) {
origin.value = { };
}
origin.value[name] = origin.value[name] || new UnknownValue();
descriptor = new UnknownPropertyDescriptor(origin.value, name);
extend(descriptor, expr1, value, options);
}
}
else {
throw new Error('Invalid object property notation. Expected [name]');
}
}
else if (/^[\w-]*$/.test(expr)) {
if (options.convertValues)
origin[expr] = UnknownValue.convert(value);
else
origin[expr] = value;
}
else {
throw new Error('Invalid object property notation. Expected property[name] or [name]');
}
return origin;
}
/**
 * Parses a form object and returns the form parameters as an object, e.g. user[name]=user1&user[password]=1234 returns user: { name: 'user1', password: '1234' }
* @param form
* @private
*/
function parseForm(form) {
var result = {};
if (typeof form === 'undefined' || form===null)
return result;
var keys = Object.keys(form);
keys.forEach(function(key) {
if (form.hasOwnProperty(key))
{
extend(result, key, form[key]);
}
});
return result;
}
PostHandler.prototype.beginRequest = function(context, callback) {
try {
var request = context.request;
//extend params object (parse form data)
if (typeof request.socket === 'undefined') {
callback();
}
else {
request.headers = request.headers || {};
if (/^application\/x-www-form-urlencoded/i.test(request.headers['content-type'])) {
//use formidable to parse request data
var f = new formidable.IncomingForm();
f.parse(request, function (err, form, files) {
if (err) {
callback(err);
return;
}
try {
//add form
if (form) {
_.assign(context.params, parseForm(form));
}
//add files
if (files)
_.assign(context.params, files);
callback();
}
catch (e) {
callback(e);
}
});
}
else {
callback();
}
}
}
catch (err) {
TraceUtils.log(err);
callback(new Error("An internal server error occured while parsing request data."));
}
};
if (typeof exports !== 'undefined') {
exports.UnknownValue = UnknownValue;
exports.createInstance = function() {
return new PostHandler();
};
} | modules/@themost/web/handlers/post.js | /**
* @license
* MOST Web Framework 2.0 Codename Blueshift
* Copyright (c) 2017, THEMOST LP All rights reserved
*
* Use of this source code is governed by an BSD-3-Clause license that can be
* found in the LICENSE file at https://themost.io/license
*/
var formidable = require('formidable');
var _ = require('lodash');
var TraceUtils = require('@themost/common/utils').TraceUtils;
/**
* @class UnknownValue
* @constructor
*/
function UnknownValue() {
//
}
UnknownValue.prototype.valueOf = function() { return null; };
UnknownValue.prototype.toJSON = function() { return null; };
UnknownValue.DateTimeRegex = /^\d{4}-([0]\d|1[0-2])-([0-2]\d|3[01])(?:[T ](\d+):(\d+)(?::(\d+)(?:\.(\d+))?)?)?(?:Z(-?\d*))?([+-](\d+):(\d+))?$/g;
UnknownValue.BooleanTrueRegex = /^true$/ig;
UnknownValue.BooleanFalseRegex = /^false$/ig;
UnknownValue.NullRegex = /^null$/ig;
UnknownValue.UndefinedRegex = /^undefined$/ig;
UnknownValue.IntegerRegex =/^[-+]?\d+$/g;
UnknownValue.FloatRegex =/^[+-]?\d+(\.\d+)?$/g;
/**
* @class UnknownPropertyDescriptor
* @constructor
*/
function UnknownPropertyDescriptor(obj, name) {
Object.defineProperty(this, 'value', { configurable:false, enumerable:true, get: function() { return obj[name]; }, set: function(value) { obj[name]=value; } });
Object.defineProperty(this, 'name', { configurable:false, enumerable:true, get: function() { return name; } });
}
/**
* @param {string} value
*/
UnknownValue.convert = function(value) {
var result;
if ((typeof value === 'string'))
{
if (value.length===0) {
result = value
}
if (value.match(UnknownValue.BooleanTrueRegex)) {
result = true;
}
else if (value.match(UnknownValue.BooleanFalseRegex)) {
result = false;
}
else if (value.match(UnknownValue.NullRegex) || value.match(UnknownValue.UndefinedRegex)) {
result = null;
}
else if (value.match(UnknownValue.IntegerRegex)) {
result = parseInt(value);
}
else if (value.match(UnknownValue.FloatRegex)) {
result = parseFloat(value);
}
else if (value.match(UnknownValue.DateTimeRegex)) {
result = new Date(Date.parse(value));
}
else {
result = value;
}
}
else {
result = value;
}
return result;
};
/**
* @class PostHandler
* @constructor
* @augments HttpHandler
*/
function PostHandler() {
}
/**
*
* @param {*} origin
* @param {string} expr
* @param {string} value
* @param {*=} options
* @returns {*}
* @private
*/
function extend(origin, expr, value, options) {
options = options || { convertValues:false };
//find base notation
var match = /(^\w+)\[/.exec(expr), name, descriptor, expr1;
if (match) {
//get property name
name = match[1];
//validate array property
if (/^\d+$/g.test(name)) {
//property is an array
if (!_.isArray(origin.value))
origin.value = [];
// get new expression
expr1 = expr.substr(match.index + match[1].length);
extend(origin, expr1, value, options);
}
else {
//set property value (unknown)
origin[name] = origin[name] || new UnknownValue();
descriptor = new UnknownPropertyDescriptor(origin, name);
// get new expression
expr1 = expr.substr(match.index + match[1].length);
extend(descriptor, expr1, value, options);
}
}
else if (expr.indexOf('[')===0) {
//get property
var re = /\[(.*?)\]/g;
match = re.exec(expr);
if (match) {
name = match[1];
// get new expression
expr1 = expr.substr(match.index + match[0].length);
if (/^\d+$/g.test(name)) {
//property is an array
if (!_.isArray(origin.value))
origin.value = [];
}
if (expr1.length===0) {
if (origin.value instanceof UnknownValue) {
origin.value = {};
}
var typedValue;
//convert string value
if ((typeof value === 'string') && options.convertValues) {
typedValue = UnknownValue.convert(value);
}
else {
typedValue = value;
}
if (_.isArray(origin.value))
origin.value.push(typedValue);
else
origin.value[name] = typedValue;
}
else {
if (origin.value instanceof UnknownValue) {
origin.value = { };
}
origin.value[name] = origin.value[name] || new UnknownValue();
descriptor = new UnknownPropertyDescriptor(origin.value, name);
extend(descriptor, expr1, value, options);
}
}
else {
throw new Error('Invalid object property notation. Expected [name]');
}
}
else if (/^[\w-]*$/.test(expr)) {
if (options.convertValues)
origin[expr] = UnknownValue.convert(value);
else
origin[expr] = value;
}
else {
throw new Error('Invalid object property notation. Expected property[name] or [name]');
}
return origin;
}
/**
 * Parses a form object and returns form parameters as an object, e.g. user[name]=user1&user[password]=1234 returns user: { name: 'user1', password: '1234' }
* @param form
* @private
*/
function parseForm(form) {
var result = {};
if (typeof form === 'undefined' || form===null)
return result;
var keys = Object.keys(form);
keys.forEach(function(key) {
if (form.hasOwnProperty(key))
{
extend(result, key, form[key]);
}
});
return result;
}
PostHandler.prototype.beginRequest = function(context, callback) {
try {
var request = context.request;
//extend params object (parse form data)
if (typeof request.socket === 'undefined') {
callback();
}
else {
request.headers = request.headers || {};
if (/^application\/x-www-form-urlencoded/i.test(request.headers['content-type'])) {
//use formidable to parse request data
var f = new formidable.IncomingForm();
f.parse(request, function (err, form, files) {
if (err) {
callback(err);
return;
}
try {
//add form
if (form) {
_.assign(context.params, parseForm(form));
}
//add files
if (files)
_.assign(context.params, files);
callback();
}
catch (e) {
callback(e);
}
});
}
else {
callback();
}
}
}
catch (err) {
TraceUtils.log(err);
        callback(new Error("An internal server error occurred while parsing request data."));
}
};
if (typeof exports !== 'undefined') {
exports.UnknownValue = UnknownValue;
exports.createInstance = function() {
return new PostHandler();
};
} | updates post handler (corrects unknown values regular expressions by removing global /g flag)
| modules/@themost/web/handlers/post.js | updates post handler (corrects unknown values regular expressions by removing global /g flag) | <ide><path>odules/@themost/web/handlers/post.js
<ide>
<ide> UnknownValue.prototype.toJSON = function() { return null; };
<ide>
<del>UnknownValue.DateTimeRegex = /^\d{4}-([0]\d|1[0-2])-([0-2]\d|3[01])(?:[T ](\d+):(\d+)(?::(\d+)(?:\.(\d+))?)?)?(?:Z(-?\d*))?([+-](\d+):(\d+))?$/g;
<del>UnknownValue.BooleanTrueRegex = /^true$/ig;
<del>UnknownValue.BooleanFalseRegex = /^false$/ig;
<del>UnknownValue.NullRegex = /^null$/ig;
<del>UnknownValue.UndefinedRegex = /^undefined$/ig;
<del>UnknownValue.IntegerRegex =/^[-+]?\d+$/g;
<del>UnknownValue.FloatRegex =/^[+-]?\d+(\.\d+)?$/g;
<add>UnknownValue.DateTimeRegex = /^\d{4}-([0]\d|1[0-2])-([0-2]\d|3[01])(?:[T ](\d+):(\d+)(?::(\d+)(?:\.(\d+))?)?)?(?:Z(-?\d*))?([+-](\d+):(\d+))?$/;
<add>UnknownValue.BooleanTrueRegex = /^true$/i;
<add>UnknownValue.BooleanFalseRegex = /^false$/i;
<add>UnknownValue.NullRegex = /^null$/i;
<add>UnknownValue.UndefinedRegex = /^undefined$/i;
<add>UnknownValue.IntegerRegex =/^[-+]?\d+$/;
<add>UnknownValue.FloatRegex =/^[+-]?\d+(\.\d+)?$/;
<ide> /**
<ide> * @class UnknownPropertyDescriptor
<ide> * @constructor |
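The diff above removes the global flag from the shared UnknownValue regular expressions. A RegExp created with /g is stateful: calls such as test() and exec() advance its lastIndex, so a module-level pattern that is reused for validation can report a non-match for input that actually matches. The following standalone JavaScript sketch illustrates the pitfall with hypothetical values; it is not part of the handler code above.

// Sketch only: why a reusable validation regex should not carry the /g flag.
var statefulTrue = /^true$/ig;           // the global flag keeps lastIndex between calls
console.log(statefulTrue.test('true'));  // true  -> lastIndex advances to 4
console.log(statefulTrue.test('true'));  // false -> matching resumes past the end, then lastIndex resets

var statelessTrue = /^true$/i;           // without /g the regex is stateless
console.log(statelessTrue.test('true')); // true
console.log(statelessTrue.test('true')); // true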
|
Java | mit | 1af1e12b1b83b66b4306ef57ca163bfb142cb6f8 | 0 | nls-oskari/oskari-server,nls-oskari/oskari-server,nls-oskari/oskari-server | package flyway.oskari;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
/**
* Database helper for statsgrid migration
*/
public class ThematicMapsViewHelper {
public static long getBundleId(Connection conn, String name) throws SQLException {
String sql = "SELECT id FROM portti_bundle WHERE name = ?";
try (PreparedStatement ps = conn.prepareStatement(sql)) {
ps.setString(1, name);
try (ResultSet rs = ps.executeQuery()) {
if (rs.next()) {
return rs.getLong("id");
}
return -1L;
}
}
}
public static ConfigNState getBundle(Connection conn, long id) throws SQLException {
String sql = "SELECT id, config, state, startup FROM portti_bundle WHERE id = ?";
try (PreparedStatement ps = conn.prepareStatement(sql)) {
ps.setLong(1, id);
try (ResultSet rs = ps.executeQuery()) {
if (rs.next()) {
ConfigNState cfg = new ConfigNState();
cfg.bundle_id = rs.getLong("id");
cfg.config = rs.getString("config");
cfg.state = rs.getString("state");
cfg.startup = rs.getString("startup");
return cfg;
}
                throw new SQLException("Couldn't find bundle with id " + id);
}
}
}
public static List<ConfigNState> getConfigsAndStates(Connection conn, long bundleId, String bundlePath) throws SQLException {
String sql = "SELECT view_id, bundle_id, seqno, config, state, startup"
+ " FROM portti_view_bundle_seq WHERE bundle_id = ?";
try (PreparedStatement ps = conn.prepareStatement(sql)) {
ps.setLong(1, bundleId);
try (ResultSet rs = ps.executeQuery()) {
List<ConfigNState> configsAndStates = new ArrayList<>();
while (rs.next()) {
String startup = rs.getString("startup");
if(bundlePath != null && !startup.contains(bundlePath)) {
// we are only interested in bundles with old path
// not in ones using the new path
continue;
}
ConfigNState cfg = new ConfigNState();
cfg.view_id = rs.getLong("view_id");
cfg.bundle_id = rs.getLong("bundle_id");
cfg.seqno = rs.getInt("seqno");
cfg.config = rs.getString("config");
cfg.state = rs.getString("state");
configsAndStates.add(cfg);
}
return configsAndStates;
}
}
}
public static void update(Connection conn, List<ConfigNState> configsAndStates) throws SQLException {
String sql = "UPDATE portti_view_bundle_seq SET"
+ " config = ?, state = ?"
+ " WHERE view_id = ? AND bundle_id = ? AND seqno = ?";
try (PreparedStatement ps = conn.prepareStatement(sql)) {
for (ConfigNState configAndState : configsAndStates) {
ps.setString(1, configAndState.config);
ps.setString(2, configAndState.state);
ps.setLong(3, configAndState.view_id);
ps.setLong(4, configAndState.bundle_id);
ps.setInt(5, configAndState.seqno);
ps.addBatch();
}
ps.executeBatch();
}
}
public static void switchBundle(Connection conn, long old_bundle_id, long new_bundle_id) throws SQLException {
final String startup = getBundle(conn, new_bundle_id).startup;
String sql = "UPDATE portti_view_bundle_seq SET"
+ " startup = ?, bundle_id= ?, bundleinstance='statsgrid'"
+ " WHERE bundle_id = ?";
try (PreparedStatement ps = conn.prepareStatement(sql)) {
ps.setString(1, startup);
ps.setLong(2, new_bundle_id);
ps.setLong(3, old_bundle_id);
ps.execute();
}
}
/**
* Returns a list of view ids that contain a bundle (statsgrid), but don't have divmanazer (required by the new statsgrid)
*/
public static List<Long> findAppsetupsHavingBundleButNoDivmanazer(Connection conn, long bundleId) throws SQLException {
String sql = "SELECT view_id FROM portti_view_bundle_seq WHERE bundle_id = ? \n" +
"and view_id not in (select view_id FROM portti_view_bundle_seq WHERE bundle_id = (select id from portti_bundle where name = 'divmanazer'))";
try (PreparedStatement ps = conn.prepareStatement(sql)) {
ps.setLong(1, bundleId);
try (ResultSet rs = ps.executeQuery()) {
List<Long> idList = new ArrayList<>();
while (rs.next()) {
idList.add(rs.getLong("view_id"));
}
return idList;
}
}
}
/**
* Updates bundle sequence to make room for divmanazer after the mapfull bundle.
* Uses private follow-up functions to do the update. This one only gets the bundles for a view in reversed order
     * (to make it easier to loop and update the seqno in the follow-up method).
*/
public static void injectDivmanazerAfterMapfull(Connection conn, long viewId, ConfigNState divmanazer, long mapfullId) throws SQLException {
String sql = "SELECT view_id, bundle_id, seqno, config, state, startup, bundleinstance\n" +
" FROM portti_view_bundle_seq where view_id = ? order by seqno DESC";
try (PreparedStatement ps = conn.prepareStatement(sql)) {
ps.setLong(1, viewId);
try (ResultSet rs = ps.executeQuery()) {
List<ConfigNState> bundles = new ArrayList<>();
while (rs.next()) {
ConfigNState cfg = new ConfigNState();
cfg.view_id = rs.getLong("view_id");
cfg.bundle_id = rs.getLong("bundle_id");
cfg.seqno = rs.getInt("seqno");
bundles.add(cfg);
}
// update seqno
// NOTE! bundles are ordered last to first ("wrong" order)
updateBundleSeq(conn, bundles, divmanazer, mapfullId);
}
}
}
/**
* Updates bundle seqnos to make room for one bundle to be placed after the "mapfull" bundle.
* Uses a follow-up method to inject the divmanazer bundle
* @param conn
* @param bundles
* @param divmanazer
* @param mapfullId
* @throws SQLException
*/
private static void updateBundleSeq(Connection conn, List<ConfigNState> bundles, ConfigNState divmanazer, long mapfullId) throws SQLException {
String sql = "UPDATE portti_view_bundle_seq SET"
+ " seqno = ?"
+ " WHERE view_id = ? AND bundle_id = ?";
try (PreparedStatement ps = conn.prepareStatement(sql)) {
int seqnoForDivManazer = -1;
long viewId = -1;
for (ConfigNState configAndState : bundles) {
if(configAndState.bundle_id == mapfullId) {
viewId = configAndState.view_id;
break;
}
seqnoForDivManazer = configAndState.seqno;
ps.setInt(1, configAndState.seqno + 1);
ps.setLong(2, configAndState.view_id);
ps.setLong(3, configAndState.bundle_id);
ps.addBatch();
}
ps.executeBatch();
if(seqnoForDivManazer == -1) {
throw new RuntimeException("Couldn't find index to fit divmanazer in");
}
divmanazer.seqno = seqnoForDivManazer;
insertDivmanazer(conn, divmanazer, seqnoForDivManazer, viewId);
}
}
/**
* Inserts divmanazer to a view with given sequence number
*/
private static void insertDivmanazer(Connection conn, ConfigNState divmanazer, int seqno, long viewId) throws SQLException {
final String sql ="INSERT INTO portti_view_bundle_seq" +
"(view_id, bundle_id, seqno, config, state, startup, bundleinstance) " +
"VALUES (?, ?, ?, ?, ?, ?, ?)";
try(final PreparedStatement statement =
conn.prepareStatement(sql)) {
statement.setLong(1, viewId);
statement.setLong(2, divmanazer.bundle_id);
statement.setInt(3, seqno);
statement.setString(4, divmanazer.config);
statement.setString(5, divmanazer.state);
statement.setString(6, divmanazer.startup);
statement.setString(7, "divmanazer");
statement.execute();
}
}
}
| content-resources/src/main/java/flyway/oskari/ThematicMapsViewHelper.java | package flyway.oskari;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
/**
* Database helper for statsgrid migration
*/
public class ThematicMapsViewHelper {
public static long getBundleId(Connection conn, String name) throws SQLException {
String sql = "SELECT id FROM portti_bundle WHERE name = ?";
try (PreparedStatement ps = conn.prepareStatement(sql)) {
ps.setString(1, name);
try (ResultSet rs = ps.executeQuery()) {
if (rs.next()) {
return rs.getLong("id");
}
return -1L;
}
}
}
public static ConfigNState getBundle(Connection conn, long id) throws SQLException {
String sql = "SELECT id, config, state, startup FROM portti_bundle WHERE id = ?";
try (PreparedStatement ps = conn.prepareStatement(sql)) {
ps.setLong(1, id);
try (ResultSet rs = ps.executeQuery()) {
if (rs.next()) {
ConfigNState cfg = new ConfigNState();
cfg.bundle_id = rs.getLong("id");
cfg.config = rs.getString("config");
cfg.state = rs.getString("state");
cfg.startup = rs.getString("startup");
return cfg;
}
                throw new SQLException("Couldn't find bundle with id " + id);
}
}
}
public static List<ConfigNState> getConfigsAndStates(Connection conn, long bundleId, String bundlePath) throws SQLException {
String sql = "SELECT view_id, bundle_id, seqno, config, state, startup"
+ " FROM portti_view_bundle_seq WHERE bundle_id = ?";
try (PreparedStatement ps = conn.prepareStatement(sql)) {
ps.setLong(1, bundleId);
try (ResultSet rs = ps.executeQuery()) {
List<ConfigNState> configsAndStates = new ArrayList<>();
while (rs.next()) {
String startup = rs.getString("startup");
if(bundlePath != null && startup.contains(bundlePath)) {
// we are only interested in bundles with old path
// not in ones using the new path
continue;
}
ConfigNState cfg = new ConfigNState();
cfg.view_id = rs.getLong("view_id");
cfg.bundle_id = rs.getLong("bundle_id");
cfg.seqno = rs.getInt("seqno");
cfg.config = rs.getString("config");
cfg.state = rs.getString("state");
configsAndStates.add(cfg);
}
return configsAndStates;
}
}
}
public static void update(Connection conn, List<ConfigNState> configsAndStates) throws SQLException {
String sql = "UPDATE portti_view_bundle_seq SET"
+ " config = ?, state = ?"
+ " WHERE view_id = ? AND bundle_id = ? AND seqno = ?";
try (PreparedStatement ps = conn.prepareStatement(sql)) {
for (ConfigNState configAndState : configsAndStates) {
ps.setString(1, configAndState.config);
ps.setString(2, configAndState.state);
ps.setLong(3, configAndState.view_id);
ps.setLong(4, configAndState.bundle_id);
ps.setInt(5, configAndState.seqno);
ps.addBatch();
}
ps.executeBatch();
}
}
public static void switchBundle(Connection conn, long old_bundle_id, long new_bundle_id) throws SQLException {
final String startup = getBundle(conn, new_bundle_id).startup;
String sql = "UPDATE portti_view_bundle_seq SET"
+ " startup = ?, bundle_id= ?, bundleinstance='statsgrid'"
+ " WHERE bundle_id = ?";
try (PreparedStatement ps = conn.prepareStatement(sql)) {
ps.setString(1, startup);
ps.setLong(2, new_bundle_id);
ps.setLong(3, old_bundle_id);
ps.execute();
}
}
/**
* Returns a list of view ids that contain a bundle (statsgrid), but don't have divmanazer (required by the new statsgrid)
*/
public static List<Long> findAppsetupsHavingBundleButNoDivmanazer(Connection conn, long bundleId) throws SQLException {
String sql = "SELECT view_id FROM portti_view_bundle_seq WHERE bundle_id = ? \n" +
"and view_id not in (select view_id FROM portti_view_bundle_seq WHERE bundle_id = (select id from portti_bundle where name = 'divmanazer'))";
try (PreparedStatement ps = conn.prepareStatement(sql)) {
ps.setLong(1, bundleId);
try (ResultSet rs = ps.executeQuery()) {
List<Long> idList = new ArrayList<>();
while (rs.next()) {
idList.add(rs.getLong("view_id"));
}
return idList;
}
}
}
/**
* Updates bundle sequence to make room for divmanazer after the mapfull bundle.
* Uses private follow-up functions to do the update. This one only gets the bundles for a view in reversed order
     * (to make it easier to loop and update the seqno in the follow-up method).
*/
public static void injectDivmanazerAfterMapfull(Connection conn, long viewId, ConfigNState divmanazer, long mapfullId) throws SQLException {
String sql = "SELECT view_id, bundle_id, seqno, config, state, startup, bundleinstance\n" +
" FROM portti_view_bundle_seq where view_id = ? order by seqno DESC";
try (PreparedStatement ps = conn.prepareStatement(sql)) {
ps.setLong(1, viewId);
try (ResultSet rs = ps.executeQuery()) {
List<ConfigNState> bundles = new ArrayList<>();
while (rs.next()) {
ConfigNState cfg = new ConfigNState();
cfg.view_id = rs.getLong("view_id");
cfg.bundle_id = rs.getLong("bundle_id");
cfg.seqno = rs.getInt("seqno");
bundles.add(cfg);
}
// update seqno
// NOTE! bundles are ordered last to first ("wrong" order)
updateBundleSeq(conn, bundles, divmanazer, mapfullId);
}
}
}
/**
* Updates bundle seqnos to make room for one bundle to be placed after the "mapfull" bundle.
* Uses a follow-up method to inject the divmanazer bundle
* @param conn
* @param bundles
* @param divmanazer
* @param mapfullId
* @throws SQLException
*/
private static void updateBundleSeq(Connection conn, List<ConfigNState> bundles, ConfigNState divmanazer, long mapfullId) throws SQLException {
String sql = "UPDATE portti_view_bundle_seq SET"
+ " seqno = ?"
+ " WHERE view_id = ? AND bundle_id = ?";
try (PreparedStatement ps = conn.prepareStatement(sql)) {
int seqnoForDivManazer = -1;
long viewId = -1;
for (ConfigNState configAndState : bundles) {
if(configAndState.bundle_id == mapfullId) {
viewId = configAndState.view_id;
break;
}
seqnoForDivManazer = configAndState.seqno;
ps.setInt(1, configAndState.seqno + 1);
ps.setLong(2, configAndState.view_id);
ps.setLong(3, configAndState.bundle_id);
ps.addBatch();
}
ps.executeBatch();
if(seqnoForDivManazer == -1) {
throw new RuntimeException("Couldn't find index to fit divmanazer in");
}
divmanazer.seqno = seqnoForDivManazer;
insertDivmanazer(conn, divmanazer, seqnoForDivManazer, viewId);
}
}
/**
* Inserts divmanazer to a view with given sequence number
*/
private static void insertDivmanazer(Connection conn, ConfigNState divmanazer, int seqno, long viewId) throws SQLException {
final String sql ="INSERT INTO portti_view_bundle_seq" +
"(view_id, bundle_id, seqno, config, state, startup, bundleinstance) " +
"VALUES (?, ?, ?, ?, ?, ?, ?)";
try(final PreparedStatement statement =
conn.prepareStatement(sql)) {
statement.setLong(1, viewId);
statement.setLong(2, divmanazer.bundle_id);
statement.setInt(3, seqno);
statement.setString(4, divmanazer.config);
statement.setString(5, divmanazer.state);
statement.setString(6, divmanazer.startup);
statement.setString(7, "divmanazer");
statement.execute();
}
}
}
| Optimization now filters new statsgrids out and old ones in as it should and not the other way around
| content-resources/src/main/java/flyway/oskari/ThematicMapsViewHelper.java | Optimization now filters new statsgrids out and old ones in as it should and not the other way around | <ide><path>ontent-resources/src/main/java/flyway/oskari/ThematicMapsViewHelper.java
<ide> List<ConfigNState> configsAndStates = new ArrayList<>();
<ide> while (rs.next()) {
<ide> String startup = rs.getString("startup");
<del> if(bundlePath != null && startup.contains(bundlePath)) {
<add> if(bundlePath != null && !startup.contains(bundlePath)) {
<ide> // we are only interested in bundles with old path
<ide> // not in ones using the new path
<ide> continue; |
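The one-character fix above inverts the skip condition so that getConfigsAndStates keeps only rows whose startup string still references the old bundle path (the ones the migration has to rewrite) and skips rows that already use the new path. The following standalone Java sketch shows the intended predicate; the startup paths are hypothetical and only illustrate the contains() check.

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class OldPathFilterSketch {

    // Keep only entries that still reference the old bundle path; a null path keeps everything.
    static List<String> bundlesToMigrate(List<String> startups, String oldBundlePath) {
        return startups.stream()
                .filter(startup -> oldBundlePath == null || startup.contains(oldBundlePath))
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        List<String> startups = Arrays.asList(
                "/Oskari/packages/statistics/bundle/statsgrid/",  // hypothetical old-style path -> migrate
                "/Oskari/bundles/statistics/statsgrid2016/");     // hypothetical new-style path -> skip
        // Prints only the old-style entry.
        System.out.println(bundlesToMigrate(startups, "bundle/statsgrid"));
    }
}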
|
Java | apache-2.0 | 667b6b022afb8c4b0caece2b105b15ffee290cba | 0 | dslomov/bazel,aehlig/bazel,davidzchen/bazel,twitter-forks/bazel,dslomov/bazel,ulfjack/bazel,ulfjack/bazel,perezd/bazel,akira-baruah/bazel,dslomov/bazel-windows,werkt/bazel,meteorcloudy/bazel,twitter-forks/bazel,ulfjack/bazel,cushon/bazel,ulfjack/bazel,katre/bazel,werkt/bazel,katre/bazel,twitter-forks/bazel,werkt/bazel,dslomov/bazel-windows,meteorcloudy/bazel,safarmer/bazel,davidzchen/bazel,katre/bazel,perezd/bazel,akira-baruah/bazel,katre/bazel,bazelbuild/bazel,perezd/bazel,safarmer/bazel,bazelbuild/bazel,ButterflyNetwork/bazel,dslomov/bazel-windows,safarmer/bazel,bazelbuild/bazel,ButterflyNetwork/bazel,akira-baruah/bazel,cushon/bazel,dslomov/bazel-windows,meteorcloudy/bazel,davidzchen/bazel,cushon/bazel,dslomov/bazel,davidzchen/bazel,bazelbuild/bazel,dslomov/bazel,perezd/bazel,meteorcloudy/bazel,davidzchen/bazel,perezd/bazel,dslomov/bazel,cushon/bazel,werkt/bazel,aehlig/bazel,davidzchen/bazel,katre/bazel,twitter-forks/bazel,aehlig/bazel,ulfjack/bazel,davidzchen/bazel,bazelbuild/bazel,twitter-forks/bazel,ButterflyNetwork/bazel,bazelbuild/bazel,cushon/bazel,ulfjack/bazel,ulfjack/bazel,aehlig/bazel,katre/bazel,meteorcloudy/bazel,werkt/bazel,safarmer/bazel,aehlig/bazel,meteorcloudy/bazel,ButterflyNetwork/bazel,dslomov/bazel,meteorcloudy/bazel,dslomov/bazel-windows,dslomov/bazel,akira-baruah/bazel,akira-baruah/bazel,werkt/bazel,perezd/bazel,ButterflyNetwork/bazel,cushon/bazel,safarmer/bazel,aehlig/bazel,twitter-forks/bazel,ButterflyNetwork/bazel,dslomov/bazel-windows,twitter-forks/bazel,safarmer/bazel,perezd/bazel,aehlig/bazel,akira-baruah/bazel | // Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.remote.blobstore.http;
import static com.google.devtools.build.lib.remote.util.Utils.getFromFuture;
import com.google.auth.Credentials;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import com.google.devtools.build.lib.remote.blobstore.SimpleBlobStore;
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelOption;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.epoll.Epoll;
import io.netty.channel.epoll.EpollDomainSocketChannel;
import io.netty.channel.epoll.EpollEventLoopGroup;
import io.netty.channel.kqueue.KQueue;
import io.netty.channel.kqueue.KQueueDomainSocketChannel;
import io.netty.channel.kqueue.KQueueEventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.pool.ChannelPool;
import io.netty.channel.pool.ChannelPoolHandler;
import io.netty.channel.pool.FixedChannelPool;
import io.netty.channel.pool.SimpleChannelPool;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.channel.unix.DomainSocketAddress;
import io.netty.handler.codec.http.HttpClientCodec;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpObjectAggregator;
import io.netty.handler.codec.http.HttpRequestEncoder;
import io.netty.handler.codec.http.HttpResponse;
import io.netty.handler.codec.http.HttpResponseDecoder;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.ssl.OpenSsl;
import io.netty.handler.ssl.SslContext;
import io.netty.handler.ssl.SslContextBuilder;
import io.netty.handler.ssl.SslHandler;
import io.netty.handler.ssl.SslProvider;
import io.netty.handler.stream.ChunkedWriteHandler;
import io.netty.handler.timeout.ReadTimeoutHandler;
import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.Promise;
import io.netty.util.internal.PlatformDependent;
import java.io.ByteArrayInputStream;
import java.io.FileInputStream;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.net.URI;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Function;
import java.util.regex.Pattern;
import javax.annotation.Nullable;
import javax.annotation.concurrent.GuardedBy;
import javax.net.ssl.SSLEngine;
/**
 * Implementation of {@link SimpleBlobStore} that can talk to an HTTP/1.1 backend.
*
* <p>Blobs (Binary large objects) are uploaded using the {@code PUT} method. Action cache blobs are
* stored under the path {@code /ac/base16-key}. CAS (Content Addressable Storage) blobs are stored
* under the path {@code /cas/base16-key}. Valid status codes for a successful upload are 200 (OK),
* 201 (CREATED), 202 (ACCEPTED) and 204 (NO CONTENT). It's recommended to return 200 (OK) on
 * success. The other status codes are supported for compatibility with the nginx webdav module
* and may be removed in the future.
*
* <p>Blobs are downloaded using the {@code GET} method at the paths they were stored at. A status
* code of 200 should be followed by the content of the blob. The status codes 404 (NOT FOUND) and
* 204 (NO CONTENT) indicate that no cache entry exists. It's recommended to return 404 (NOT FOUND)
* as the 204 (NO CONTENT) status code is only supported for compatibility with the nginx webdav
* module.
*
* <p>TLS is supported and enabled automatically when using HTTPS as the URI scheme.
*
* <p>Uploads do not use {@code Expect: 100-CONTINUE} headers, as this would incur an additional
 * roundtrip for every upload while adding little practical value, as we would expect most uploads to be
* accepted.
*
* <p>The implementation currently does not support transfer encoding chunked.
*/
public final class HttpBlobStore implements SimpleBlobStore {
private static final Pattern INVALID_TOKEN_ERROR =
Pattern.compile("\\s*error\\s*=\\s*\"?invalid_token\"?");
private final EventLoopGroup eventLoop;
private final ChannelPool channelPool;
private final URI uri;
private final int timeoutMillis;
private final boolean useTls;
private final Object closeLock = new Object();
@GuardedBy("closeLock")
private boolean isClosed;
private final Object credentialsLock = new Object();
@GuardedBy("credentialsLock")
private final Credentials creds;
@GuardedBy("credentialsLock")
private long lastRefreshTime;
public static HttpBlobStore create(URI uri, int timeoutMillis,
int remoteMaxConnections, @Nullable final Credentials creds)
throws Exception {
return new HttpBlobStore(
NioEventLoopGroup::new,
NioSocketChannel.class,
uri, timeoutMillis, remoteMaxConnections, creds,
null);
}
public static HttpBlobStore create(
DomainSocketAddress domainSocketAddress,
URI uri, int timeoutMillis, int remoteMaxConnections, @Nullable final Credentials creds)
throws Exception {
if (KQueue.isAvailable()) {
return new HttpBlobStore(
KQueueEventLoopGroup::new,
KQueueDomainSocketChannel.class,
uri, timeoutMillis, remoteMaxConnections, creds,
domainSocketAddress);
} else if (Epoll.isAvailable()) {
return new HttpBlobStore(
EpollEventLoopGroup::new,
EpollDomainSocketChannel.class,
uri, timeoutMillis, remoteMaxConnections, creds,
domainSocketAddress);
} else {
throw new Exception("Unix domain sockets are unsupported on this platform");
}
}
private HttpBlobStore(
Function<Integer, EventLoopGroup> newEventLoopGroup,
Class<? extends Channel> channelClass,
URI uri, int timeoutMillis, int remoteMaxConnections, @Nullable final Credentials creds,
@Nullable SocketAddress socketAddress)
throws Exception {
useTls = uri.getScheme().equals("https");
if (uri.getPort() == -1) {
int port = useTls ? 443 : 80;
uri =
new URI(
uri.getScheme(),
uri.getUserInfo(),
uri.getHost(),
port,
uri.getPath(),
uri.getQuery(),
uri.getFragment());
}
this.uri = uri;
if (socketAddress == null) {
socketAddress = new InetSocketAddress(uri.getHost(), uri.getPort());
}
final SslContext sslCtx;
if (useTls) {
// OpenSsl gives us a > 2x speed improvement on fast networks, but requires netty tcnative
// to be there which is not available on all platforms and environments.
SslProvider sslProvider = OpenSsl.isAvailable() ? SslProvider.OPENSSL : SslProvider.JDK;
sslCtx = SslContextBuilder.forClient().sslProvider(sslProvider).build();
} else {
sslCtx = null;
}
this.eventLoop = newEventLoopGroup.apply(2);
Bootstrap clientBootstrap =
new Bootstrap()
.channel(channelClass)
.option(ChannelOption.CONNECT_TIMEOUT_MILLIS, timeoutMillis)
.group(eventLoop)
.remoteAddress(socketAddress);
ChannelPoolHandler channelPoolHandler =
new ChannelPoolHandler() {
@Override
public void channelReleased(Channel ch) {}
@Override
public void channelAcquired(Channel ch) {}
@Override
public void channelCreated(Channel ch) {
ChannelPipeline p = ch.pipeline();
if (sslCtx != null) {
SSLEngine engine = sslCtx.newEngine(ch.alloc());
engine.setUseClientMode(true);
p.addFirst("ssl-handler", new SslHandler(engine));
}
}
};
if (remoteMaxConnections > 0) {
channelPool = new FixedChannelPool(clientBootstrap, channelPoolHandler, remoteMaxConnections);
} else {
channelPool = new SimpleChannelPool(clientBootstrap, channelPoolHandler);
}
this.creds = creds;
this.timeoutMillis = timeoutMillis;
}
@SuppressWarnings("FutureReturnValueIgnored")
private Channel acquireUploadChannel() throws InterruptedException {
Promise<Channel> channelReady = eventLoop.next().newPromise();
channelPool
.acquire()
.addListener(
(Future<Channel> channelAcquired) -> {
if (!channelAcquired.isSuccess()) {
channelReady.setFailure(channelAcquired.cause());
return;
}
try {
Channel ch = channelAcquired.getNow();
ChannelPipeline p = ch.pipeline();
if (!isChannelPipelineEmpty(p)) {
channelReady.setFailure(
new IllegalStateException("Channel pipeline is not empty."));
return;
}
p.addLast(new HttpResponseDecoder());
// The 10KiB limit was chosen at random. We only expect HTTP servers to respond with
// an error message in the body and that should always be less than 10KiB.
p.addLast(new HttpObjectAggregator(10 * 1024));
p.addLast(new HttpRequestEncoder());
p.addLast(new ChunkedWriteHandler());
synchronized (credentialsLock) {
p.addLast(new HttpUploadHandler(creds));
}
channelReady.setSuccess(ch);
} catch (Throwable t) {
channelReady.setFailure(t);
}
});
try {
return channelReady.get();
} catch (ExecutionException e) {
PlatformDependent.throwException(e.getCause());
return null;
}
}
@SuppressWarnings("FutureReturnValueIgnored")
private void releaseUploadChannel(Channel ch) {
if (ch.isOpen()) {
try {
ch.pipeline().remove(HttpResponseDecoder.class);
ch.pipeline().remove(HttpObjectAggregator.class);
ch.pipeline().remove(HttpRequestEncoder.class);
ch.pipeline().remove(ChunkedWriteHandler.class);
ch.pipeline().remove(HttpUploadHandler.class);
} catch (NoSuchElementException e) {
// If the channel is in the process of closing but not yet closed, some handlers could have
// been removed and would cause NoSuchElement exceptions to be thrown. Because handlers are
// removed in reverse-order, if we get a NoSuchElement exception, the following handlers
// should have been removed.
}
}
channelPool.release(ch);
}
@SuppressWarnings("FutureReturnValueIgnored")
private Future<Channel> acquireDownloadChannel() {
Promise<Channel> channelReady = eventLoop.next().newPromise();
channelPool
.acquire()
.addListener(
(Future<Channel> channelAcquired) -> {
if (!channelAcquired.isSuccess()) {
channelReady.setFailure(channelAcquired.cause());
return;
}
try {
Channel ch = channelAcquired.getNow();
ChannelPipeline p = ch.pipeline();
if (!isChannelPipelineEmpty(p)) {
channelReady.setFailure(
new IllegalStateException("Channel pipeline is not empty."));
return;
}
ch.pipeline()
.addFirst("read-timeout-handler", new ReadTimeoutHandler(timeoutMillis));
p.addLast(new HttpClientCodec());
synchronized (credentialsLock) {
p.addLast(new HttpDownloadHandler(creds));
}
channelReady.setSuccess(ch);
} catch (Throwable t) {
channelReady.setFailure(t);
}
});
return channelReady;
}
@SuppressWarnings("FutureReturnValueIgnored")
private void releaseDownloadChannel(Channel ch) {
if (ch.isOpen()) {
// The channel might have been closed due to an error, in which case its pipeline
// has already been cleared. Closed channels can't be reused.
try {
ch.pipeline().remove(ReadTimeoutHandler.class);
ch.pipeline().remove(HttpClientCodec.class);
ch.pipeline().remove(HttpDownloadHandler.class);
} catch (NoSuchElementException e) {
// If the channel is in the process of closing but not yet closed, some handlers could have
// been removed and would cause NoSuchElement exceptions to be thrown. Because handlers are
// removed in reverse-order, if we get a NoSuchElement exception, the following handlers
// should have been removed.
}
}
channelPool.release(ch);
}
private boolean isChannelPipelineEmpty(ChannelPipeline pipeline) {
return (pipeline.first() == null)
|| (useTls
&& "ssl-handler".equals(pipeline.firstContext().name())
&& pipeline.first() == pipeline.last());
}
@Override
public boolean containsKey(String key) {
throw new UnsupportedOperationException("HTTP Caching does not use this method.");
}
@Override
public ListenableFuture<Boolean> get(String key, OutputStream out) {
return get(key, out, true);
}
@SuppressWarnings("FutureReturnValueIgnored")
private ListenableFuture<Boolean> get(String key, final OutputStream out, boolean casDownload) {
final AtomicBoolean dataWritten = new AtomicBoolean();
OutputStream wrappedOut =
new OutputStream() {
          // OutputStream.close() does nothing, which is what we want: it ensures that the
// OutputStream can't be closed somewhere in the Netty pipeline, so that we can support
// retries. The OutputStream is closed in the finally block below.
@Override
public void write(byte[] b, int offset, int length) throws IOException {
dataWritten.set(true);
out.write(b, offset, length);
}
@Override
public void write(int b) throws IOException {
dataWritten.set(true);
out.write(b);
}
@Override
public void flush() throws IOException {
out.flush();
}
};
DownloadCommand download = new DownloadCommand(uri, casDownload, key, wrappedOut);
SettableFuture<Boolean> outerF = SettableFuture.create();
acquireDownloadChannel()
.addListener(
(Future<Channel> chP) -> {
if (!chP.isSuccess()) {
outerF.setException(chP.cause());
return;
}
Channel ch = chP.getNow();
ch.writeAndFlush(download)
.addListener(
(f) -> {
try {
if (f.isSuccess()) {
outerF.set(true);
} else {
Throwable cause = f.cause();
// cause can be of type HttpException, because Netty uses
// Unsafe.throwException to
// re-throw a checked exception that hasn't been declared in the method
// signature.
if (cause instanceof HttpException) {
HttpResponse response = ((HttpException) cause).response();
if (!dataWritten.get() && authTokenExpired(response)) {
// The error is due to an auth token having expired. Let's try
// again.
refreshCredentials();
getAfterCredentialRefresh(download, outerF);
return;
} else if (cacheMiss(response.status())) {
outerF.set(false);
return;
}
}
outerF.setException(cause);
}
} finally {
releaseDownloadChannel(ch);
}
});
});
return outerF;
}
@SuppressWarnings("FutureReturnValueIgnored")
private void getAfterCredentialRefresh(DownloadCommand cmd, SettableFuture<Boolean> outerF) {
acquireDownloadChannel()
.addListener(
(Future<Channel> chP) -> {
if (!chP.isSuccess()) {
outerF.setException(chP.cause());
return;
}
Channel ch = chP.getNow();
ch.writeAndFlush(cmd)
.addListener(
(f) -> {
try {
if (f.isSuccess()) {
outerF.set(true);
} else {
Throwable cause = f.cause();
if (cause instanceof HttpException) {
HttpResponse response = ((HttpException) cause).response();
if (cacheMiss(response.status())) {
outerF.set(false);
return;
}
}
outerF.setException(cause);
}
} finally {
releaseDownloadChannel(ch);
}
});
});
}
@Override
public boolean getActionResult(String actionKey, OutputStream out)
throws IOException, InterruptedException {
return getFromFuture(get(actionKey, out, false));
}
@Override
public void put(String key, long length, InputStream in)
throws IOException, InterruptedException {
put(key, length, in, true);
}
@SuppressWarnings("FutureReturnValueIgnored")
private void put(String key, long length, InputStream in, boolean casUpload)
throws IOException, InterruptedException {
InputStream wrappedIn =
new FilterInputStream(in) {
@Override
public void close() {
// Ensure that the InputStream can't be closed somewhere in the Netty
// pipeline, so that we can support retries. The InputStream is closed in
// the finally block below.
}
};
UploadCommand upload = new UploadCommand(uri, casUpload, key, wrappedIn, length);
Channel ch = null;
try {
ch = acquireUploadChannel();
ChannelFuture uploadFuture = ch.writeAndFlush(upload);
uploadFuture.sync();
} catch (Exception e) {
// e can be of type HttpException, because Netty uses Unsafe.throwException to re-throw a
// checked exception that hasn't been declared in the method signature.
if (e instanceof HttpException) {
HttpResponse response = ((HttpException) e).response();
if (authTokenExpired(response)) {
refreshCredentials();
// The error is due to an auth token having expired. Let's try again.
if (!reset(in)) {
// The InputStream can't be reset and thus we can't retry as most likely
// bytes have already been read from the InputStream.
throw e;
}
putAfterCredentialRefresh(upload);
return;
}
}
throw e;
} finally {
in.close();
if (ch != null) {
releaseUploadChannel(ch);
}
}
}
@SuppressWarnings("FutureReturnValueIgnored")
private void putAfterCredentialRefresh(UploadCommand cmd) throws InterruptedException {
Channel ch = null;
try {
ch = acquireUploadChannel();
ChannelFuture uploadFuture = ch.writeAndFlush(cmd);
uploadFuture.sync();
} finally {
if (ch != null) {
releaseUploadChannel(ch);
}
}
}
private boolean reset(InputStream in) throws IOException {
if (in.markSupported()) {
in.reset();
return true;
}
if (in instanceof FileInputStream) {
// FileInputStream does not support reset().
((FileInputStream) in).getChannel().position(0);
return true;
}
return false;
}
@Override
public void putActionResult(String actionKey, byte[] data)
throws IOException, InterruptedException {
try (InputStream in = new ByteArrayInputStream(data)) {
put(actionKey, data.length, in, false);
}
}
/**
* It's safe to suppress this warning because all methods on Netty futures return {@code this}. So
* we are not ignoring anything.
*/
@SuppressWarnings("FutureReturnValueIgnored")
@Override
public void close() {
synchronized (closeLock) {
if (isClosed) {
return;
}
isClosed = true;
channelPool.close();
eventLoop.shutdownGracefully();
}
}
private boolean cacheMiss(HttpResponseStatus status) {
// Supporting NO_CONTENT for nginx webdav compatibility.
return status.equals(HttpResponseStatus.NOT_FOUND)
|| status.equals(HttpResponseStatus.NO_CONTENT);
}
/** See https://tools.ietf.org/html/rfc6750#section-3.1 */
private boolean authTokenExpired(HttpResponse response) {
synchronized (credentialsLock) {
if (creds == null) {
return false;
}
}
List<String> values = response.headers().getAllAsString(HttpHeaderNames.WWW_AUTHENTICATE);
String value = String.join(",", values);
if (value != null && value.startsWith("Bearer")) {
return INVALID_TOKEN_ERROR.matcher(value).find();
} else {
return response.status().equals(HttpResponseStatus.UNAUTHORIZED);
}
}
private void refreshCredentials() throws IOException {
synchronized (credentialsLock) {
long now = System.currentTimeMillis();
// Call creds.refresh() at most once per second. The one second was arbitrarily chosen, as
// a small enough value that we don't expect to interfere with actual token lifetimes, but
// it should just make sure that potentially hundreds of threads don't call this method
// at the same time.
if ((now - lastRefreshTime) > TimeUnit.SECONDS.toMillis(1)) {
lastRefreshTime = now;
creds.refresh();
}
}
}
}
| src/main/java/com/google/devtools/build/lib/remote/blobstore/http/HttpBlobStore.java | // Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.remote.blobstore.http;
import static com.google.devtools.build.lib.remote.util.Utils.getFromFuture;
import com.google.auth.Credentials;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import com.google.devtools.build.lib.remote.blobstore.SimpleBlobStore;
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelOption;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.epoll.Epoll;
import io.netty.channel.epoll.EpollDomainSocketChannel;
import io.netty.channel.epoll.EpollEventLoopGroup;
import io.netty.channel.kqueue.KQueue;
import io.netty.channel.kqueue.KQueueDomainSocketChannel;
import io.netty.channel.kqueue.KQueueEventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.pool.ChannelPool;
import io.netty.channel.pool.ChannelPoolHandler;
import io.netty.channel.pool.FixedChannelPool;
import io.netty.channel.pool.SimpleChannelPool;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.channel.unix.DomainSocketAddress;
import io.netty.handler.codec.http.HttpClientCodec;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpObjectAggregator;
import io.netty.handler.codec.http.HttpRequestEncoder;
import io.netty.handler.codec.http.HttpResponse;
import io.netty.handler.codec.http.HttpResponseDecoder;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.ssl.OpenSsl;
import io.netty.handler.ssl.SslContext;
import io.netty.handler.ssl.SslContextBuilder;
import io.netty.handler.ssl.SslHandler;
import io.netty.handler.ssl.SslProvider;
import io.netty.handler.stream.ChunkedWriteHandler;
import io.netty.handler.timeout.ReadTimeoutHandler;
import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.Promise;
import io.netty.util.internal.PlatformDependent;
import java.io.ByteArrayInputStream;
import java.io.FileInputStream;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.net.URI;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Function;
import java.util.regex.Pattern;
import javax.annotation.Nullable;
import javax.annotation.concurrent.GuardedBy;
import javax.net.ssl.SSLEngine;
/**
 * Implementation of {@link SimpleBlobStore} that can talk to an HTTP/1.1 backend.
*
* <p>Blobs (Binary large objects) are uploaded using the {@code PUT} method. Action cache blobs are
* stored under the path {@code /ac/base16-key}. CAS (Content Addressable Storage) blobs are stored
* under the path {@code /cas/base16-key}. Valid status codes for a successful upload are 200 (OK),
* 201 (CREATED), 202 (ACCEPTED) and 204 (NO CONTENT). It's recommended to return 200 (OK) on
 * success. The other status codes are supported for compatibility with the nginx webdav module
* and may be removed in the future.
*
* <p>Blobs are downloaded using the {@code GET} method at the paths they were stored at. A status
* code of 200 should be followed by the content of the blob. The status codes 404 (NOT FOUND) and
* 204 (NO CONTENT) indicate that no cache entry exists. It's recommended to return 404 (NOT FOUND)
* as the 204 (NO CONTENT) status code is only supported for compatibility with the nginx webdav
* module.
*
* <p>TLS is supported and enabled automatically when using HTTPS as the URI scheme.
*
* <p>Uploads do not use {@code Expect: 100-CONTINUE} headers, as this would incur an additional
 * roundtrip for every upload while adding little practical value, as we would expect most uploads to be
* accepted.
*
* <p>The implementation currently does not support transfer encoding chunked.
*/
public final class HttpBlobStore implements SimpleBlobStore {
private static final Pattern INVALID_TOKEN_ERROR =
Pattern.compile("\\s*error\\s*=\\s*\"?invalid_token\"?");
private final EventLoopGroup eventLoop;
private final ChannelPool channelPool;
private final URI uri;
private final int timeoutMillis;
private final Object closeLock = new Object();
@GuardedBy("closeLock")
private boolean isClosed;
private final Object credentialsLock = new Object();
@GuardedBy("credentialsLock")
private final Credentials creds;
@GuardedBy("credentialsLock")
private long lastRefreshTime;
public static HttpBlobStore create(URI uri, int timeoutMillis,
int remoteMaxConnections, @Nullable final Credentials creds)
throws Exception {
return new HttpBlobStore(
NioEventLoopGroup::new,
NioSocketChannel.class,
uri, timeoutMillis, remoteMaxConnections, creds,
null);
}
public static HttpBlobStore create(
DomainSocketAddress domainSocketAddress,
URI uri, int timeoutMillis, int remoteMaxConnections, @Nullable final Credentials creds)
throws Exception {
if (KQueue.isAvailable()) {
return new HttpBlobStore(
KQueueEventLoopGroup::new,
KQueueDomainSocketChannel.class,
uri, timeoutMillis, remoteMaxConnections, creds,
domainSocketAddress);
} else if (Epoll.isAvailable()) {
return new HttpBlobStore(
EpollEventLoopGroup::new,
EpollDomainSocketChannel.class,
uri, timeoutMillis, remoteMaxConnections, creds,
domainSocketAddress);
} else {
throw new Exception("Unix domain sockets are unsupported on this platform");
}
}
private HttpBlobStore(
Function<Integer, EventLoopGroup> newEventLoopGroup,
Class<? extends Channel> channelClass,
URI uri, int timeoutMillis, int remoteMaxConnections, @Nullable final Credentials creds,
@Nullable SocketAddress socketAddress)
throws Exception {
boolean useTls = uri.getScheme().equals("https");
if (uri.getPort() == -1) {
int port = useTls ? 443 : 80;
uri =
new URI(
uri.getScheme(),
uri.getUserInfo(),
uri.getHost(),
port,
uri.getPath(),
uri.getQuery(),
uri.getFragment());
}
this.uri = uri;
if (socketAddress == null) {
socketAddress = new InetSocketAddress(uri.getHost(), uri.getPort());
}
final SslContext sslCtx;
if (useTls) {
// OpenSsl gives us a > 2x speed improvement on fast networks, but requires netty tcnative
// to be there which is not available on all platforms and environments.
SslProvider sslProvider = OpenSsl.isAvailable() ? SslProvider.OPENSSL : SslProvider.JDK;
sslCtx = SslContextBuilder.forClient().sslProvider(sslProvider).build();
} else {
sslCtx = null;
}
this.eventLoop = newEventLoopGroup.apply(2);
Bootstrap clientBootstrap =
new Bootstrap()
.channel(channelClass)
.option(ChannelOption.CONNECT_TIMEOUT_MILLIS, timeoutMillis)
.group(eventLoop)
.remoteAddress(socketAddress);
ChannelPoolHandler channelPoolHandler =
new ChannelPoolHandler() {
@Override
public void channelReleased(Channel ch) {}
@Override
public void channelAcquired(Channel ch) {}
@Override
public void channelCreated(Channel ch) {
ChannelPipeline p = ch.pipeline();
if (sslCtx != null) {
SSLEngine engine = sslCtx.newEngine(ch.alloc());
engine.setUseClientMode(true);
p.addFirst("ssl-handler", new SslHandler(engine));
}
}
};
if (remoteMaxConnections > 0) {
channelPool = new FixedChannelPool(clientBootstrap, channelPoolHandler, remoteMaxConnections);
} else {
channelPool = new SimpleChannelPool(clientBootstrap, channelPoolHandler);
}
this.creds = creds;
this.timeoutMillis = timeoutMillis;
}
@SuppressWarnings("FutureReturnValueIgnored")
private Channel acquireUploadChannel() throws InterruptedException {
Promise<Channel> channelReady = eventLoop.next().newPromise();
channelPool
.acquire()
.addListener(
(Future<Channel> channelAcquired) -> {
if (!channelAcquired.isSuccess()) {
channelReady.setFailure(channelAcquired.cause());
return;
}
try {
Channel ch = channelAcquired.getNow();
ChannelPipeline p = ch.pipeline();
p.addLast(new HttpResponseDecoder());
// The 10KiB limit was chosen at random. We only expect HTTP servers to respond with
// an error message in the body and that should always be less than 10KiB.
p.addLast(new HttpObjectAggregator(10 * 1024));
p.addLast(new HttpRequestEncoder());
p.addLast(new ChunkedWriteHandler());
synchronized (credentialsLock) {
p.addLast(new HttpUploadHandler(creds));
}
channelReady.setSuccess(ch);
} catch (Throwable t) {
channelReady.setFailure(t);
}
});
try {
return channelReady.get();
} catch (ExecutionException e) {
PlatformDependent.throwException(e.getCause());
return null;
}
}
@SuppressWarnings("FutureReturnValueIgnored")
private void releaseUploadChannel(Channel ch) {
if (ch.isOpen()) {
ch.pipeline().remove(HttpResponseDecoder.class);
ch.pipeline().remove(HttpObjectAggregator.class);
ch.pipeline().remove(HttpRequestEncoder.class);
ch.pipeline().remove(ChunkedWriteHandler.class);
ch.pipeline().remove(HttpUploadHandler.class);
}
channelPool.release(ch);
}
@SuppressWarnings("FutureReturnValueIgnored")
private Future<Channel> acquireDownloadChannel() {
Promise<Channel> channelReady = eventLoop.next().newPromise();
channelPool
.acquire()
.addListener(
(Future<Channel> channelAcquired) -> {
if (!channelAcquired.isSuccess()) {
channelReady.setFailure(channelAcquired.cause());
return;
}
try {
Channel ch = channelAcquired.getNow();
ChannelPipeline p = ch.pipeline();
ch.pipeline()
.addFirst("read-timeout-handler", new ReadTimeoutHandler(timeoutMillis));
p.addLast(new HttpClientCodec());
synchronized (credentialsLock) {
p.addLast(new HttpDownloadHandler(creds));
}
channelReady.setSuccess(ch);
} catch (Throwable t) {
channelReady.setFailure(t);
}
});
return channelReady;
}
@SuppressWarnings("FutureReturnValueIgnored")
private void releaseDownloadChannel(Channel ch) {
if (ch.isOpen()) {
// The channel might have been closed due to an error, in which case its pipeline
// has already been cleared. Closed channels can't be reused.
ch.pipeline().remove(ReadTimeoutHandler.class);
ch.pipeline().remove(HttpClientCodec.class);
ch.pipeline().remove(HttpDownloadHandler.class);
}
channelPool.release(ch);
}
@Override
public boolean containsKey(String key) {
throw new UnsupportedOperationException("HTTP Caching does not use this method.");
}
@Override
public ListenableFuture<Boolean> get(String key, OutputStream out) {
return get(key, out, true);
}
@SuppressWarnings("FutureReturnValueIgnored")
private ListenableFuture<Boolean> get(String key, final OutputStream out, boolean casDownload) {
final AtomicBoolean dataWritten = new AtomicBoolean();
OutputStream wrappedOut =
new OutputStream() {
          // OutputStream.close() does nothing, which is what we want: it ensures that the
// OutputStream can't be closed somewhere in the Netty pipeline, so that we can support
// retries. The OutputStream is closed in the finally block below.
@Override
public void write(byte[] b, int offset, int length) throws IOException {
dataWritten.set(true);
out.write(b, offset, length);
}
@Override
public void write(int b) throws IOException {
dataWritten.set(true);
out.write(b);
}
@Override
public void flush() throws IOException {
out.flush();
}
};
DownloadCommand download = new DownloadCommand(uri, casDownload, key, wrappedOut);
SettableFuture<Boolean> outerF = SettableFuture.create();
acquireDownloadChannel()
.addListener(
(Future<Channel> chP) -> {
if (!chP.isSuccess()) {
outerF.setException(chP.cause());
return;
}
Channel ch = chP.getNow();
ch.writeAndFlush(download)
.addListener(
(f) -> {
try {
if (f.isSuccess()) {
outerF.set(true);
} else {
Throwable cause = f.cause();
// cause can be of type HttpException, because Netty uses
// Unsafe.throwException to
// re-throw a checked exception that hasn't been declared in the method
// signature.
if (cause instanceof HttpException) {
HttpResponse response = ((HttpException) cause).response();
if (!dataWritten.get() && authTokenExpired(response)) {
// The error is due to an auth token having expired. Let's try
// again.
refreshCredentials();
getAfterCredentialRefresh(download, outerF);
return;
} else if (cacheMiss(response.status())) {
outerF.set(false);
return;
}
}
outerF.setException(cause);
}
} finally {
releaseDownloadChannel(ch);
}
});
});
return outerF;
}
@SuppressWarnings("FutureReturnValueIgnored")
private void getAfterCredentialRefresh(DownloadCommand cmd, SettableFuture<Boolean> outerF) {
acquireDownloadChannel()
.addListener(
(Future<Channel> chP) -> {
if (!chP.isSuccess()) {
outerF.setException(chP.cause());
return;
}
Channel ch = chP.getNow();
ch.writeAndFlush(cmd)
.addListener(
(f) -> {
try {
if (f.isSuccess()) {
outerF.set(true);
} else {
Throwable cause = f.cause();
if (cause instanceof HttpException) {
HttpResponse response = ((HttpException) cause).response();
if (cacheMiss(response.status())) {
outerF.set(false);
return;
}
}
outerF.setException(cause);
}
} finally {
releaseDownloadChannel(ch);
}
});
});
}
@Override
public boolean getActionResult(String actionKey, OutputStream out)
throws IOException, InterruptedException {
return getFromFuture(get(actionKey, out, false));
}
@Override
public void put(String key, long length, InputStream in)
throws IOException, InterruptedException {
put(key, length, in, true);
}
@SuppressWarnings("FutureReturnValueIgnored")
private void put(String key, long length, InputStream in, boolean casUpload)
throws IOException, InterruptedException {
InputStream wrappedIn =
new FilterInputStream(in) {
@Override
public void close() {
// Ensure that the InputStream can't be closed somewhere in the Netty
// pipeline, so that we can support retries. The InputStream is closed in
// the finally block below.
}
};
UploadCommand upload = new UploadCommand(uri, casUpload, key, wrappedIn, length);
Channel ch = null;
try {
ch = acquireUploadChannel();
ChannelFuture uploadFuture = ch.writeAndFlush(upload);
uploadFuture.sync();
} catch (Exception e) {
// e can be of type HttpException, because Netty uses Unsafe.throwException to re-throw a
// checked exception that hasn't been declared in the method signature.
if (e instanceof HttpException) {
HttpResponse response = ((HttpException) e).response();
if (authTokenExpired(response)) {
refreshCredentials();
// The error is due to an auth token having expired. Let's try again.
if (!reset(in)) {
// The InputStream can't be reset and thus we can't retry as most likely
// bytes have already been read from the InputStream.
throw e;
}
putAfterCredentialRefresh(upload);
return;
}
}
throw e;
} finally {
in.close();
if (ch != null) {
releaseUploadChannel(ch);
}
}
}
@SuppressWarnings("FutureReturnValueIgnored")
private void putAfterCredentialRefresh(UploadCommand cmd) throws InterruptedException {
Channel ch = null;
try {
ch = acquireUploadChannel();
ChannelFuture uploadFuture = ch.writeAndFlush(cmd);
uploadFuture.sync();
} finally {
if (ch != null) {
releaseUploadChannel(ch);
}
}
}
private boolean reset(InputStream in) throws IOException {
if (in.markSupported()) {
in.reset();
return true;
}
if (in instanceof FileInputStream) {
// FileInputStream does not support reset().
((FileInputStream) in).getChannel().position(0);
return true;
}
return false;
}
@Override
public void putActionResult(String actionKey, byte[] data)
throws IOException, InterruptedException {
try (InputStream in = new ByteArrayInputStream(data)) {
put(actionKey, data.length, in, false);
}
}
/**
* It's safe to suppress this warning because all methods on Netty futures return {@code this}. So
* we are not ignoring anything.
*/
@SuppressWarnings("FutureReturnValueIgnored")
@Override
public void close() {
synchronized (closeLock) {
if (isClosed) {
return;
}
isClosed = true;
channelPool.close();
eventLoop.shutdownGracefully();
}
}
private boolean cacheMiss(HttpResponseStatus status) {
// Supporting NO_CONTENT for nginx webdav compatibility.
return status.equals(HttpResponseStatus.NOT_FOUND)
|| status.equals(HttpResponseStatus.NO_CONTENT);
}
/** See https://tools.ietf.org/html/rfc6750#section-3.1 */
private boolean authTokenExpired(HttpResponse response) {
synchronized (credentialsLock) {
if (creds == null) {
return false;
}
}
List<String> values = response.headers().getAllAsString(HttpHeaderNames.WWW_AUTHENTICATE);
String value = String.join(",", values);
if (value != null && value.startsWith("Bearer")) {
return INVALID_TOKEN_ERROR.matcher(value).find();
} else {
return response.status().equals(HttpResponseStatus.UNAUTHORIZED);
}
}
private void refreshCredentials() throws IOException {
synchronized (credentialsLock) {
long now = System.currentTimeMillis();
// Call creds.refresh() at most once per second. The one second was arbitrarily chosen, as
// a small enough value that we don't expect to interfere with actual token lifetimes, but
// it should just make sure that potentially hundreds of threads don't call this method
// at the same time.
if ((now - lastRefreshTime) > TimeUnit.SECONDS.toMillis(1)) {
lastRefreshTime = now;
creds.refresh();
}
}
}
}
| Fix race condition with releaseUploadChannel().
A race condition can occur in `HttpBlobStore.releaseUploadChannel()` when `ChannelHandlerContext` is closed by `HttpUploadHandler.channelRead0`. Because `ChannelHandlerContext.close` runs asynchronously and removes the pipeline handlers in reverse order (before the channel is marked as closed), `HttpBlobStore.releaseUploadChannel()` can be executed while the channel is still open and handlers are being removed, causing an exception to be thrown by the `ch.pipeline().remove(X)` calls. This fix saves the status of the keepAlive response from the `HttpUploadHandler` so that the channel can be fully closed before it is released.
Resolves #5952
Closes #5953.
PiperOrigin-RevId: 211437717
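The essence of the race: handler removal in `releaseUploadChannel()` and the asynchronous pipeline teardown triggered by the close can interleave. A minimal illustrative sketch of the defensive pattern (not the actual patch -- the real diff follows below; the handler classes shown are just examples):

  private void releaseChannelSafely(Channel ch, ChannelPool pool) {
    if (ch.isOpen()) {
      try {
        // A concurrent close() may already be tearing the pipeline down,
        // removing handlers in reverse order as it goes.
        ch.pipeline().remove(HttpResponseDecoder.class);
        ch.pipeline().remove(HttpUploadHandler.class);
      } catch (NoSuchElementException e) {
        // The handler was already removed because the close ran first; since removal
        // happens in reverse order, the remaining handlers are gone too.
      }
    }
    pool.release(ch);
  }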
| src/main/java/com/google/devtools/build/lib/remote/blobstore/http/HttpBlobStore.java | Fix race condition with releaseUploadChannel(). | <ide><path>rc/main/java/com/google/devtools/build/lib/remote/blobstore/http/HttpBlobStore.java
<ide> import java.net.SocketAddress;
<ide> import java.net.URI;
<ide> import java.util.List;
<add>import java.util.NoSuchElementException;
<ide> import java.util.concurrent.ExecutionException;
<ide> import java.util.concurrent.TimeUnit;
<ide> import java.util.concurrent.atomic.AtomicBoolean;
<ide> * <p>The implementation currently does not support transfer encoding chunked.
<ide> */
<ide> public final class HttpBlobStore implements SimpleBlobStore {
<del>
<ide> private static final Pattern INVALID_TOKEN_ERROR =
<ide> Pattern.compile("\\s*error\\s*=\\s*\"?invalid_token\"?");
<ide>
<ide> private final ChannelPool channelPool;
<ide> private final URI uri;
<ide> private final int timeoutMillis;
<add> private final boolean useTls;
<ide>
<ide> private final Object closeLock = new Object();
<ide>
<ide> URI uri, int timeoutMillis, int remoteMaxConnections, @Nullable final Credentials creds,
<ide> @Nullable SocketAddress socketAddress)
<ide> throws Exception {
<del> boolean useTls = uri.getScheme().equals("https");
<add> useTls = uri.getScheme().equals("https");
<ide> if (uri.getPort() == -1) {
<ide> int port = useTls ? 443 : 80;
<ide> uri =
<ide> try {
<ide> Channel ch = channelAcquired.getNow();
<ide> ChannelPipeline p = ch.pipeline();
<add>
<add> if (!isChannelPipelineEmpty(p)) {
<add> channelReady.setFailure(
<add> new IllegalStateException("Channel pipeline is not empty."));
<add> return;
<add> }
<add>
<ide> p.addLast(new HttpResponseDecoder());
<ide> // The 10KiB limit was chosen at random. We only expect HTTP servers to respond with
<ide> // an error message in the body and that should always be less than 10KiB.
<ide> @SuppressWarnings("FutureReturnValueIgnored")
<ide> private void releaseUploadChannel(Channel ch) {
<ide> if (ch.isOpen()) {
<del> ch.pipeline().remove(HttpResponseDecoder.class);
<del> ch.pipeline().remove(HttpObjectAggregator.class);
<del> ch.pipeline().remove(HttpRequestEncoder.class);
<del> ch.pipeline().remove(ChunkedWriteHandler.class);
<del> ch.pipeline().remove(HttpUploadHandler.class);
<add> try {
<add> ch.pipeline().remove(HttpResponseDecoder.class);
<add> ch.pipeline().remove(HttpObjectAggregator.class);
<add> ch.pipeline().remove(HttpRequestEncoder.class);
<add> ch.pipeline().remove(ChunkedWriteHandler.class);
<add> ch.pipeline().remove(HttpUploadHandler.class);
<add> } catch (NoSuchElementException e) {
<add> // If the channel is in the process of closing but not yet closed, some handlers could have
<add> // been removed and would cause NoSuchElement exceptions to be thrown. Because handlers are
<add> // removed in reverse-order, if we get a NoSuchElement exception, the following handlers
<add> // should have been removed.
<add> }
<ide> }
<ide> channelPool.release(ch);
<ide> }
<ide> try {
<ide> Channel ch = channelAcquired.getNow();
<ide> ChannelPipeline p = ch.pipeline();
<add>
<add> if (!isChannelPipelineEmpty(p)) {
<add> channelReady.setFailure(
<add> new IllegalStateException("Channel pipeline is not empty."));
<add> return;
<add> }
<add>
<ide> ch.pipeline()
<ide> .addFirst("read-timeout-handler", new ReadTimeoutHandler(timeoutMillis));
<ide> p.addLast(new HttpClientCodec());
<ide> if (ch.isOpen()) {
<ide> // The channel might have been closed due to an error, in which case its pipeline
<ide> // has already been cleared. Closed channels can't be reused.
<del> ch.pipeline().remove(ReadTimeoutHandler.class);
<del> ch.pipeline().remove(HttpClientCodec.class);
<del> ch.pipeline().remove(HttpDownloadHandler.class);
<add> try {
<add> ch.pipeline().remove(ReadTimeoutHandler.class);
<add> ch.pipeline().remove(HttpClientCodec.class);
<add> ch.pipeline().remove(HttpDownloadHandler.class);
<add> } catch (NoSuchElementException e) {
<add> // If the channel is in the process of closing but not yet closed, some handlers could have
<add> // been removed and would cause NoSuchElement exceptions to be thrown. Because handlers are
<add> // removed in reverse-order, if we get a NoSuchElement exception, the following handlers
<add> // should have been removed.
<add> }
<ide> }
<ide> channelPool.release(ch);
<add> }
<add>
<add> private boolean isChannelPipelineEmpty(ChannelPipeline pipeline) {
<add> return (pipeline.first() == null)
<add> || (useTls
<add> && "ssl-handler".equals(pipeline.firstContext().name())
<add> && pipeline.first() == pipeline.last());
<ide> }
<ide>
<ide> @Override |
|
Java | epl-1.0 | cac505c9635f45b4c5c47285f9bbab96309be6fd | 0 | cmaoling/portfolio,cmaoling/portfolio,buchen/portfolio,sebasbaumh/portfolio,cmaoling/portfolio,buchen/portfolio,cmaoling/portfolio,buchen/portfolio,sebasbaumh/portfolio,sebasbaumh/portfolio,sebasbaumh/portfolio,buchen/portfolio | package name.abuchen.portfolio.ui.util.swt;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.jface.action.IMenuListener;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.events.MouseListener;
import org.eclipse.swt.events.MouseMoveListener;
import org.eclipse.swt.events.MouseTrackAdapter;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Layout;
import org.eclipse.swt.widgets.Shell;
import name.abuchen.portfolio.ui.Images;
import name.abuchen.portfolio.ui.util.ContextMenu;
/**
* A custom sash layout that supports double-clicking to hide and unhide one
* element. It supports only two children.
*/
public class SashLayout extends Layout
{
private class SashMouseTrackListener extends MouseTrackAdapter
{
@Override
public void mouseExit(MouseEvent e)
{
host.setCursor(null);
divider.setVisible(false);
}
@Override
public void mouseEnter(MouseEvent e)
{
divider.setVisible(true);
}
}
private class SashMouseMoveListener implements MouseMoveListener
{
@Override
public void mouseMove(MouseEvent e)
{
if (!isDragging)
{
if (sash.contains(e.x, e.y))
host.setCursor(host.getDisplay()
.getSystemCursor(isHorizontal ? SWT.CURSOR_SIZEWE : SWT.CURSOR_SIZENS));
else
host.setCursor(host.getDisplay().getSystemCursor(SWT.CURSOR_ARROW));
}
else
{
adjustSize(e.x, e.y);
host.layout();
host.update();
}
}
}
private class SashMouseListener implements MouseListener
{
@Override
public void mouseUp(MouseEvent e)
{
host.setCapture(false);
isDragging = false;
}
@Override
public void mouseDown(MouseEvent e)
{
if (e.button != 1)
return;
if (sash.contains(e.x, e.y))
{
isDragging = true;
host.setCapture(true);
}
}
@Override
public void mouseDoubleClick(MouseEvent e)
{
SashLayoutData data = getLayoutData(getChildren().get(isBeginning ? 0 : 1));
data.size *= -1;
host.layout();
host.update();
divider.setVisible(false);
}
}
private static final int SASH_WIDTH = 10;
private static final int MIN_WIDHT = 20;
/**
* orientation of the column arrangement
*/
private final boolean isHorizontal;
/**
* location of the auto-collapsable column
*/
private final boolean isBeginning;
private Composite host;
private Label divider;
private Rectangle sash = new Rectangle(0, 0, 1, 1);
private boolean isDragging = false;
public SashLayout(final Composite host, int style)
{
this.host = host;
this.host.setBackground(Display.getDefault().getSystemColor(SWT.COLOR_WHITE));
this.isHorizontal = (style & SWT.HORIZONTAL) == SWT.HORIZONTAL;
this.isBeginning = (style & SWT.BEGINNING) == SWT.BEGINNING;
this.divider = new Label(host, SWT.NONE);
this.divider.setImage(isHorizontal ? Images.HANDLE_NS.image() : Images.HANDLE_WE.image());
this.divider.setVisible(false);
MouseTrackAdapter mouseTrackListener = new SashMouseTrackListener();
host.addMouseTrackListener(mouseTrackListener);
divider.addMouseTrackListener(mouseTrackListener);
MouseMoveListener mouseMoveListener = new SashMouseMoveListener();
host.addMouseMoveListener(mouseMoveListener);
divider.addMouseMoveListener(e -> {
Point p = Display.getCurrent().map(divider, host, new Point(e.x, e.y));
e.x = p.x;
e.y = p.y;
mouseMoveListener.mouseMove(e);
});
MouseListener mouseListener = new SashMouseListener();
host.addMouseListener(mouseListener);
divider.addMouseListener(new SashMouseListener()
{
@Override
public void mouseDown(MouseEvent e)
{
Point p = Display.getCurrent().map(divider, host, new Point(e.x, e.y));
e.x = p.x;
e.y = p.y;
super.mouseDown(e);
}
});
}
public void addQuickNavigation(IMenuListener menuListener)
{
new ContextMenu(divider, menuListener).hook();
}
protected void adjustSize(int curX, int curY)
{
List<Control> children = getChildren();
Rectangle left = children.get(0).getBounds();
Rectangle right = children.get(1).getBounds();
if (left == null || right == null)
return;
int proposedSize = isHorizontal ? curX : curY;
int totalSize = isHorizontal ? left.width + right.width : left.height + right.height;
SashLayoutData data = getLayoutData(children.get(isBeginning ? 0 : 1));
// if collapsed, drag only if proposed size is bigger than min width. That
// excludes many accidental drags when trying to restore via double click.
if (data.size > 0 || (proposedSize > MIN_WIDHT && proposedSize < totalSize - MIN_WIDHT))
{
// ensure minimum size of a child (if not hidden)
proposedSize = Math.max(MIN_WIDHT, proposedSize);
proposedSize = Math.min(totalSize - MIN_WIDHT, proposedSize);
data.size = isBeginning ? proposedSize : totalSize - proposedSize;
}
}
@Override
protected Point computeSize(Composite composite, int wHint, int hHint, boolean flushCache)
{
return new Point(500, 200);
}
public List<Control> getChildren()
{
Control[] children = host.getChildren();
List<Control> answer = new ArrayList<>();
for (Control child : children)
if (child != divider)
answer.add(child);
return answer;
}
@Override
protected void layout(Composite composite, boolean flushCache)
{
List<Control> children = getChildren();
if (children.size() != 2)
throw new IllegalArgumentException();
Rectangle bounds = composite.getBounds();
if (composite instanceof Shell)
{
bounds = ((Shell) composite).getClientArea();
}
else
{
bounds.x = 0;
bounds.y = 0;
}
int availableSize = (isHorizontal ? bounds.width : bounds.height) - SASH_WIDTH;
int fixedSize = Math.max(0, getLayoutData(children.get(isBeginning ? 0 : 1)).size);
int remaining = Math.max(0, availableSize - fixedSize);
int pos = isHorizontal ? bounds.x : bounds.y;
pos += layout(children.get(0), pos, isBeginning ? fixedSize : remaining, bounds);
pos += layoutDivider(pos, bounds);
layout(children.get(1), pos, isBeginning ? remaining : fixedSize, bounds);
}
private int layout(Control control, int newPosition, int newSize, Rectangle bounds)
{
Rectangle subBounds = isHorizontal ? new Rectangle(newPosition, bounds.y, newSize, bounds.height)
: new Rectangle(bounds.x, newPosition, bounds.width, newSize);
control.setBounds(subBounds);
return newSize;
}
private int layoutDivider(final int newPosition, Rectangle bounds)
{
// position the divider rectangle
this.sash = isHorizontal ? new Rectangle(newPosition, bounds.y, SASH_WIDTH, bounds.height)
: new Rectangle(bounds.x, newPosition, bounds.width, SASH_WIDTH);
host.redraw(sash.x, sash.y, sash.width, sash.height, false);
// position the divider image handle
Rectangle imageBounds = divider.getImage().getBounds();
Rectangle dividerBounds = isHorizontal ? //
new Rectangle(newPosition + ((SASH_WIDTH - imageBounds.width) / 2),
(sash.height - imageBounds.height) / 2, imageBounds.width, imageBounds.height)
: new Rectangle((sash.width - imageBounds.width) / 2,
newPosition + (SASH_WIDTH - imageBounds.height) / 2, imageBounds.width,
imageBounds.height);
divider.setBounds(dividerBounds);
return SASH_WIDTH;
}
private SashLayoutData getLayoutData(Control control)
{
SashLayoutData data = (SashLayoutData) control.getLayoutData();
if (data == null)
{
data = new SashLayoutData();
control.setLayoutData(data);
}
return data;
}
}
| name.abuchen.portfolio.ui/src/name/abuchen/portfolio/ui/util/swt/SashLayout.java | package name.abuchen.portfolio.ui.util.swt;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.jface.action.IMenuListener;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.events.MouseListener;
import org.eclipse.swt.events.MouseMoveListener;
import org.eclipse.swt.events.MouseTrackAdapter;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Layout;
import org.eclipse.swt.widgets.Shell;
import name.abuchen.portfolio.ui.Images;
import name.abuchen.portfolio.ui.util.ContextMenu;
/**
* A custom sash layout that supports double-clicking to hide and unhide one
* element. It supports only two children.
*/
public class SashLayout extends Layout
{
private class SashMouseTrackListener extends MouseTrackAdapter
{
@Override
public void mouseExit(MouseEvent e)
{
host.setCursor(null);
}
}
private class SashMouseMoveListener implements MouseMoveListener
{
@Override
public void mouseMove(MouseEvent e)
{
if (!isDragging)
{
if (sash.contains(e.x, e.y))
host.setCursor(host.getDisplay()
.getSystemCursor(isHorizontal ? SWT.CURSOR_SIZEWE : SWT.CURSOR_SIZENS));
else
host.setCursor(host.getDisplay().getSystemCursor(SWT.CURSOR_ARROW));
}
else
{
adjustSize(e.x, e.y);
host.layout();
host.update();
}
}
}
private class SashMouseListener implements MouseListener
{
@Override
public void mouseUp(MouseEvent e)
{
host.setCapture(false);
isDragging = false;
}
@Override
public void mouseDown(MouseEvent e)
{
if (e.button != 1)
return;
if (sash.contains(e.x, e.y))
{
isDragging = true;
host.setCapture(true);
}
}
@Override
public void mouseDoubleClick(MouseEvent e)
{
SashLayoutData data = getLayoutData(getChildren().get(isBeginning ? 0 : 1));
data.size *= -1;
host.layout();
host.update();
}
}
private static final int SASH_WIDTH = 10;
private static final int MIN_WIDHT = 20;
/**
* orientation of the column arrangement
*/
private final boolean isHorizontal;
/**
* location of the auto-collapsable column
*/
private final boolean isBeginning;
private Composite host;
private Label divider;
private Rectangle sash = new Rectangle(0, 0, 1, 1);
private boolean isDragging = false;
public SashLayout(final Composite host, int style)
{
this.host = host;
this.host.setBackground(Display.getDefault().getSystemColor(SWT.COLOR_WHITE));
this.isHorizontal = (style & SWT.HORIZONTAL) == SWT.HORIZONTAL;
this.isBeginning = (style & SWT.BEGINNING) == SWT.BEGINNING;
this.divider = new Label(host, SWT.NONE);
this.divider.setImage(isHorizontal ? Images.HANDLE_NS.image() : Images.HANDLE_WE.image());
MouseTrackAdapter mouseTrackListener = new SashMouseTrackListener();
host.addMouseTrackListener(mouseTrackListener);
divider.addMouseTrackListener(mouseTrackListener);
MouseMoveListener mouseMoveListener = new SashMouseMoveListener();
host.addMouseMoveListener(mouseMoveListener);
divider.addMouseMoveListener(e -> {
Point p = Display.getCurrent().map(divider, host, new Point(e.x, e.y));
e.x = p.x;
e.y = p.y;
mouseMoveListener.mouseMove(e);
});
MouseListener mouseListener = new SashMouseListener();
host.addMouseListener(mouseListener);
divider.addMouseListener(new SashMouseListener()
{
@Override
public void mouseDown(MouseEvent e)
{
Point p = Display.getCurrent().map(divider, host, new Point(e.x, e.y));
e.x = p.x;
e.y = p.y;
super.mouseDown(e);
}
});
}
public void addQuickNavigation(IMenuListener menuListener)
{
new ContextMenu(divider, menuListener).hook();
}
protected void adjustSize(int curX, int curY)
{
List<Control> children = getChildren();
Rectangle left = children.get(0).getBounds();
Rectangle right = children.get(1).getBounds();
if (left == null || right == null)
return;
int proposedSize = isHorizontal ? curX : curY;
int totalSize = isHorizontal ? left.width + right.width : left.height + right.height;
SashLayoutData data = getLayoutData(children.get(isBeginning ? 0 : 1));
// if collapsed, drag only if proposed size is bigger than min width. That
// excludes many accidental drags when trying to restore via double click.
if (data.size > 0 || (proposedSize > MIN_WIDHT && proposedSize < totalSize - MIN_WIDHT))
{
// ensure minimum size of a child (if not hidden)
proposedSize = Math.max(MIN_WIDHT, proposedSize);
proposedSize = Math.min(totalSize - MIN_WIDHT, proposedSize);
data.size = isBeginning ? proposedSize : totalSize - proposedSize;
}
}
@Override
protected Point computeSize(Composite composite, int wHint, int hHint, boolean flushCache)
{
return new Point(500, 200);
}
public List<Control> getChildren()
{
Control[] children = host.getChildren();
List<Control> answer = new ArrayList<>();
for (Control child : children)
if (child != divider)
answer.add(child);
return answer;
}
@Override
protected void layout(Composite composite, boolean flushCache)
{
List<Control> children = getChildren();
if (children.size() != 2)
throw new IllegalArgumentException();
Rectangle bounds = composite.getBounds();
if (composite instanceof Shell)
{
bounds = ((Shell) composite).getClientArea();
}
else
{
bounds.x = 0;
bounds.y = 0;
}
int availableSize = (isHorizontal ? bounds.width : bounds.height) - SASH_WIDTH;
int fixedSize = Math.max(0, getLayoutData(children.get(isBeginning ? 0 : 1)).size);
int remaining = Math.max(0, availableSize - fixedSize);
int pos = isHorizontal ? bounds.x : bounds.y;
pos += layout(children.get(0), pos, isBeginning ? fixedSize : remaining, bounds);
pos += layoutDivider(pos, bounds);
layout(children.get(1), pos, isBeginning ? remaining : fixedSize, bounds);
}
private int layout(Control control, int newPosition, int newSize, Rectangle bounds)
{
Rectangle subBounds = isHorizontal ? new Rectangle(newPosition, bounds.y, newSize, bounds.height)
: new Rectangle(bounds.x, newPosition, bounds.width, newSize);
control.setBounds(subBounds);
return newSize;
}
private int layoutDivider(final int newPosition, Rectangle bounds)
{
// position the divider rectangle
this.sash = isHorizontal ? new Rectangle(newPosition, bounds.y, SASH_WIDTH, bounds.height)
: new Rectangle(bounds.x, newPosition, bounds.width, SASH_WIDTH);
host.redraw(sash.x, sash.y, sash.width, sash.height, false);
// position the divider image handle
Rectangle imageBounds = divider.getImage().getBounds();
Rectangle dividerBounds = isHorizontal ? //
new Rectangle(newPosition + ((SASH_WIDTH - imageBounds.width) / 2),
(sash.height - imageBounds.height) / 2, imageBounds.width, imageBounds.height)
: new Rectangle((sash.width - imageBounds.width) / 2,
newPosition + (SASH_WIDTH - imageBounds.height) / 2, imageBounds.width,
imageBounds.height);
divider.setBounds(dividerBounds);
return SASH_WIDTH;
}
private SashLayoutData getLayoutData(Control control)
{
SashLayoutData data = (SashLayoutData) control.getLayoutData();
if (data == null)
{
data = new SashLayoutData();
control.setLayoutData(data);
}
return data;
}
}
| Made sash divider invisible unless mouse is hovering over divider
| name.abuchen.portfolio.ui/src/name/abuchen/portfolio/ui/util/swt/SashLayout.java | Made sash divider invisible unless mouse is hovering over divider | <ide><path>ame.abuchen.portfolio.ui/src/name/abuchen/portfolio/ui/util/swt/SashLayout.java
<ide> public void mouseExit(MouseEvent e)
<ide> {
<ide> host.setCursor(null);
<add> divider.setVisible(false);
<add> }
<add>
<add> @Override
<add> public void mouseEnter(MouseEvent e)
<add> {
<add> divider.setVisible(true);
<ide> }
<ide> }
<ide>
<ide>
<ide> host.layout();
<ide> host.update();
<add>
<add> divider.setVisible(false);
<ide> }
<ide> }
<ide>
<ide>
<ide> this.divider = new Label(host, SWT.NONE);
<ide> this.divider.setImage(isHorizontal ? Images.HANDLE_NS.image() : Images.HANDLE_WE.image());
<add> this.divider.setVisible(false);
<ide>
<ide> MouseTrackAdapter mouseTrackListener = new SashMouseTrackListener();
<ide> host.addMouseTrackListener(mouseTrackListener); |
|
Java | mit | error: pathspec '14/E14_10/E14_10.java' did not match any file(s) known to git
| 351b1c992a8b5def24cad2aa1e9dafee1cf945b8 | 1 | maxalthoff/intro-to-java-exercises | /*
Write a program that draws a cylinder.
*/
import javafx.application.Application;
import javafx.stage.Stage;
import javafx.scene.Scene;
import javafx.scene.shape.Arc;
import javafx.scene.shape.ArcType;
import javafx.scene.shape.Line;
import javafx.scene.layout.Pane;
import javafx.scene.layout.StackPane;
import javafx.scene.paint.Color;
import javafx.geometry.Pos;
import javafx.geometry.Insets;
public class E14_10 extends Application {
@Override
public void start(Stage primaryStage) {
CylinderPane pane = new CylinderPane(200, 200 / 2.5);
pane.setAlignment(Pos.CENTER);
pane.setPadding(new Insets(20));
Scene scene = new Scene(pane);
primaryStage.setTitle("E14_10");
primaryStage.setScene(scene);
primaryStage.show();
}
class CylinderPane extends StackPane {
CylinderPane(double xRadius, double yRadius) {
// Container pane for all the shapes
Pane pane = new Pane();
// The top ellipse
Arc topArc = new Arc(xRadius, yRadius, xRadius, yRadius, 0, 360);
topArc.setFill(Color.WHITE);
topArc.setStroke(Color.BLACK);
// The top half of the bottom ellipse, which should be dashed in order
// to denote that it is "unseen" behind the cylinder.
Arc bottomDash =
new Arc(xRadius, yRadius + yRadius * 5, xRadius, yRadius, 0, 180);
bottomDash.setFill(Color.WHITE);
bottomDash.setStroke(Color.BLACK);
bottomDash.getStrokeDashArray().addAll(6.0, 21.0);
// The bottom half of the bottom ellipse, drawn solid since it is on the
// front of the cylinder.
Arc bottomSolid =
new Arc(xRadius, yRadius + yRadius * 5, xRadius, yRadius, 180, 180);
bottomSolid.setFill(Color.WHITE);
bottomSolid.setStroke(Color.BLACK);
bottomSolid.setType(ArcType.OPEN);
// The lines connecting the top and bottom ellipses.
Line leftLine = new Line(0, yRadius, 0, yRadius + yRadius * 5);
leftLine.setStroke(Color.BLACK);
Line rightLine =
new Line(xRadius * 2, yRadius, xRadius * 2, yRadius + yRadius * 5);
// Shapes added to container
pane.getChildren().addAll(
topArc, bottomDash, bottomSolid, leftLine, rightLine);
// Container added to StackPane for presentation
getChildren().add(pane);
}
}
public static void main(String[] args) {
Application.launch(args);
}
}
| 14/E14_10/E14_10.java | Chapter 14, Exercise 10
| 14/E14_10/E14_10.java | Chapter 14, Exercise 10 | <ide><path>4/E14_10/E14_10.java
<add>/*
<add> Write a program that draws a cylinder.
<add>*/
<add>
<add>import javafx.application.Application;
<add>import javafx.stage.Stage;
<add>import javafx.scene.Scene;
<add>import javafx.scene.shape.Arc;
<add>import javafx.scene.shape.ArcType;
<add>import javafx.scene.shape.Line;
<add>import javafx.scene.layout.Pane;
<add>import javafx.scene.layout.StackPane;
<add>import javafx.scene.paint.Color;
<add>import javafx.geometry.Pos;
<add>import javafx.geometry.Insets;
<add>
<add>public class E14_10 extends Application {
<add> @Override
<add> public void start(Stage primaryStage) {
<add> CylinderPane pane = new CylinderPane(200, 200 / 2.5);
<add> pane.setAlignment(Pos.CENTER);
<add> pane.setPadding(new Insets(20));
<add>
<add> Scene scene = new Scene(pane);
<add> primaryStage.setTitle("E14_10");
<add> primaryStage.setScene(scene);
<add> primaryStage.show();
<add> }
<add>
<add> class CylinderPane extends StackPane {
<add> CylinderPane(double xRadius, double yRadius) {
<add> // Container pane for all the shapes
<add> Pane pane = new Pane();
<add>
<add> // The top ellipse
<add> Arc topArc = new Arc(xRadius, yRadius, xRadius, yRadius, 0, 360);
<add> topArc.setFill(Color.WHITE);
<add> topArc.setStroke(Color.BLACK);
<add>
<add> // The top half of the bottom ellipse, which should be dashed in order
<add> // to denote that it is "unseen" behind the cylinder.
<add> Arc bottomDash =
<add> new Arc(xRadius, yRadius + yRadius * 5, xRadius, yRadius, 0, 180);
<add> bottomDash.setFill(Color.WHITE);
<add> bottomDash.setStroke(Color.BLACK);
<add> bottomDash.getStrokeDashArray().addAll(6.0, 21.0);
<add>
<add> // The bottom half of the bottom ellipse, drawn solid since it is on the
<add> // front of the cylinder.
<add> Arc bottomSolid =
<add> new Arc(xRadius, yRadius + yRadius * 5, xRadius, yRadius, 180, 180);
<add> bottomSolid.setFill(Color.WHITE);
<add> bottomSolid.setStroke(Color.BLACK);
<add> bottomSolid.setType(ArcType.OPEN);
<add>
<add> // The lines connecting the top and bottom ellipses.
<add> Line leftLine = new Line(0, yRadius, 0, yRadius + yRadius * 5);
<add> leftLine.setStroke(Color.BLACK);
<add> Line rightLine =
<add> new Line(xRadius * 2, yRadius, xRadius * 2, yRadius + yRadius * 5);
<add>
<add> // Shapes added to container
<add> pane.getChildren().addAll(
<add> topArc, bottomDash, bottomSolid, leftLine, rightLine);
<add>
<add> // Container added to StackPane for presentation
<add> getChildren().add(pane);
<add> }
<add> }
<add>
<add> public static void main(String[] args) {
<add> Application.launch(args);
<add> }
<add>} |
|
Java | apache-2.0 | a2ea70011fdb58f928a336f57df1bdf35ccf8993 | 0 | nssales/OG-Platform,codeaudit/OG-Platform,jerome79/OG-Platform,codeaudit/OG-Platform,McLeodMoores/starling,nssales/OG-Platform,ChinaQuants/OG-Platform,ChinaQuants/OG-Platform,DevStreet/FinanceAnalytics,jerome79/OG-Platform,McLeodMoores/starling,ChinaQuants/OG-Platform,DevStreet/FinanceAnalytics,codeaudit/OG-Platform,jerome79/OG-Platform,nssales/OG-Platform,ChinaQuants/OG-Platform,codeaudit/OG-Platform,DevStreet/FinanceAnalytics,nssales/OG-Platform,DevStreet/FinanceAnalytics,jeorme/OG-Platform,McLeodMoores/starling,jeorme/OG-Platform,McLeodMoores/starling,jeorme/OG-Platform,jerome79/OG-Platform,jeorme/OG-Platform | /**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.livedata.client;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.fudgemsg.FudgeContext;
import org.fudgemsg.FudgeMsg;
import org.fudgemsg.FudgeMsgEnvelope;
import org.fudgemsg.mapping.FudgeDeserializer;
import org.fudgemsg.mapping.FudgeSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.ImmutableMap;
import com.opengamma.OpenGammaRuntimeException;
import com.opengamma.livedata.LiveDataSpecification;
import com.opengamma.livedata.UserPrincipal;
import com.opengamma.livedata.msg.EntitlementRequest;
import com.opengamma.livedata.msg.EntitlementResponse;
import com.opengamma.livedata.msg.EntitlementResponseMsg;
import com.opengamma.transport.FudgeMessageReceiver;
import com.opengamma.transport.FudgeRequestSender;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.fudgemsg.OpenGammaFudgeContext;
/**
* Checks entitlements against a LiveData server by sending the server a Fudge message.
*/
public class DistributedEntitlementChecker {
/**
* If no response from server is received within this period of time, throw exception
*/
public static final long TIMEOUT_MS = 5000;
private static final Logger s_logger = LoggerFactory.getLogger(DistributedEntitlementChecker.class);
private final FudgeRequestSender _requestSender;
private final FudgeContext _fudgeContext;
public DistributedEntitlementChecker(FudgeRequestSender requestSender) {
this(requestSender, OpenGammaFudgeContext.getInstance());
}
public DistributedEntitlementChecker(FudgeRequestSender requestSender, FudgeContext fudgeContext) {
ArgumentChecker.notNull(requestSender, "Request Sender");
ArgumentChecker.notNull(fudgeContext, "Fudge Context");
_requestSender = requestSender;
_fudgeContext = fudgeContext;
}
public Map<LiveDataSpecification, Boolean> isEntitled(UserPrincipal user,
Collection<LiveDataSpecification> specifications) {
s_logger.info("Checking entitlements by {} to {}", user, specifications);
// The entitlement check completely and utterly screws up the market data
// server for Activ, wiping out valid resolved ids. As the result of the
// entitlement check is currently ignored, we'll just avoid doing it at all!
if (true) {
return ImmutableMap.of();
}
final Map<LiveDataSpecification, Boolean> returnValue = new HashMap<LiveDataSpecification, Boolean>();
if (specifications == null || specifications.size() == 0) {
// Nothing to check
return returnValue;
}
FudgeMsg requestMessage = composeRequestMessage(user, specifications);
final CountDownLatch latch = new CountDownLatch(1);
_requestSender.sendRequest(requestMessage, new FudgeMessageReceiver() {
@Override
public void messageReceived(FudgeContext fudgeContext,
FudgeMsgEnvelope msgEnvelope) {
FudgeMsg msg = msgEnvelope.getMessage();
EntitlementResponseMsg responseMsg = EntitlementResponseMsg.fromFudgeMsg(new FudgeDeserializer(fudgeContext), msg);
for (EntitlementResponse response : responseMsg.getResponses()) {
returnValue.put(response.getLiveDataSpecification(), response.getIsEntitled());
}
latch.countDown();
}
});
boolean success;
try {
success = latch.await(TIMEOUT_MS, TimeUnit.MILLISECONDS);
} catch (InterruptedException e) {
Thread.interrupted();
throw new OpenGammaRuntimeException("Interrupted", e);
}
if (!success) {
throw new OpenGammaRuntimeException("Timeout. Waited for entitlement response for " + TIMEOUT_MS + " with no response.");
}
s_logger.info("Got entitlement response {}", returnValue);
return returnValue;
}
public boolean isEntitled(UserPrincipal user,
LiveDataSpecification specification) {
Map<LiveDataSpecification, Boolean> entitlements = isEntitled(user, Collections.singleton(specification));
return entitlements.get(specification);
}
private FudgeMsg composeRequestMessage(UserPrincipal user,
Collection<LiveDataSpecification> specifications) {
EntitlementRequest request = new EntitlementRequest(user, specifications);
return request.toFudgeMsg(new FudgeSerializer(_fudgeContext));
}
}
| projects/OG-LiveData/src/main/java/com/opengamma/livedata/client/DistributedEntitlementChecker.java | /**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.livedata.client;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.fudgemsg.FudgeContext;
import org.fudgemsg.FudgeMsg;
import org.fudgemsg.FudgeMsgEnvelope;
import org.fudgemsg.mapping.FudgeDeserializer;
import org.fudgemsg.mapping.FudgeSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.ImmutableMap;
import com.opengamma.OpenGammaRuntimeException;
import com.opengamma.livedata.LiveDataSpecification;
import com.opengamma.livedata.UserPrincipal;
import com.opengamma.livedata.msg.EntitlementRequest;
import com.opengamma.livedata.msg.EntitlementResponse;
import com.opengamma.livedata.msg.EntitlementResponseMsg;
import com.opengamma.transport.FudgeMessageReceiver;
import com.opengamma.transport.FudgeRequestSender;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.fudgemsg.OpenGammaFudgeContext;
/**
* Checks entitlements against a LiveData server by sending the server a Fudge message.
*/
public class DistributedEntitlementChecker {
/**
* If no response from server is received within this period of time, throw exception
*/
public static final long TIMEOUT_MS = 5000;
private static final Logger s_logger = LoggerFactory.getLogger(DistributedEntitlementChecker.class);
private final FudgeRequestSender _requestSender;
private final FudgeContext _fudgeContext;
public DistributedEntitlementChecker(FudgeRequestSender requestSender) {
this(requestSender, OpenGammaFudgeContext.getInstance());
}
public DistributedEntitlementChecker(FudgeRequestSender requestSender, FudgeContext fudgeContext) {
ArgumentChecker.notNull(requestSender, "Request Sender");
ArgumentChecker.notNull(fudgeContext, "Fudge Context");
_requestSender = requestSender;
_fudgeContext = fudgeContext;
}
public Map<LiveDataSpecification, Boolean> isEntitled(UserPrincipal user,
Collection<LiveDataSpecification> specifications) {
s_logger.info("Checking entitlements by {} to {}", user, specifications);
// The entitlement check completely and utterly screws up the market data
// server for Activ, wiping out valid resolved ids. As the result of the
// entitlement check is currently ignored, we'll just avoid doing it at all!
if (true) return ImmutableMap.of();
final Map<LiveDataSpecification, Boolean> returnValue = new HashMap<LiveDataSpecification, Boolean>();
if (specifications == null || specifications.size() == 0) {
// Nothing to check
return returnValue;
}
FudgeMsg requestMessage = composeRequestMessage(user, specifications);
final CountDownLatch latch = new CountDownLatch(1);
_requestSender.sendRequest(requestMessage, new FudgeMessageReceiver() {
@Override
public void messageReceived(FudgeContext fudgeContext,
FudgeMsgEnvelope msgEnvelope) {
FudgeMsg msg = msgEnvelope.getMessage();
EntitlementResponseMsg responseMsg = EntitlementResponseMsg.fromFudgeMsg(new FudgeDeserializer(fudgeContext), msg);
for (EntitlementResponse response : responseMsg.getResponses()) {
returnValue.put(response.getLiveDataSpecification(), response.getIsEntitled());
}
latch.countDown();
}
});
boolean success;
try {
success = latch.await(TIMEOUT_MS, TimeUnit.MILLISECONDS);
} catch (InterruptedException e) {
Thread.interrupted();
throw new OpenGammaRuntimeException("Interrupted", e);
}
if (!success) {
throw new OpenGammaRuntimeException("Timeout. Waited for entitlement response for " + TIMEOUT_MS + " with no response.");
}
s_logger.info("Got entitlement response {}", returnValue);
return returnValue;
}
public boolean isEntitled(UserPrincipal user,
LiveDataSpecification specification) {
Map<LiveDataSpecification, Boolean> entitlements = isEntitled(user, Collections.singleton(specification));
return entitlements.get(specification);
}
private FudgeMsg composeRequestMessage(UserPrincipal user,
Collection<LiveDataSpecification> specifications) {
EntitlementRequest request = new EntitlementRequest(user, specifications);
return request.toFudgeMsg(new FudgeSerializer(_fudgeContext));
}
}
| Fix Checkstyle error
| projects/OG-LiveData/src/main/java/com/opengamma/livedata/client/DistributedEntitlementChecker.java | Fix Checkstyle error | <ide><path>rojects/OG-LiveData/src/main/java/com/opengamma/livedata/client/DistributedEntitlementChecker.java
<ide> // The entitlement check completely and utterly screws up the market data
<ide> // server for Activ, wiping out valid resolved ids. As the result of the
<ide> // entitlement check is currently ignored, we'll just avoid doing it at all!
<del> if (true) return ImmutableMap.of();
<add> if (true) {
<add> return ImmutableMap.of();
<add> }
<ide>
<ide> final Map<LiveDataSpecification, Boolean> returnValue = new HashMap<LiveDataSpecification, Boolean>();
<ide> |
|
Java | apache-2.0 | 5333bb4b960cbfbd19ff1db0a3a65338d77b8c3e | 0 | talklittle/liquibase,ivaylo5ev/liquibase,evigeant/liquibase,cleiter/liquibase,balazs-zsoldos/liquibase,fossamagna/liquibase,mortegac/liquibase,iherasymenko/liquibase,dprguard2000/liquibase,dprguard2000/liquibase,instantdelay/liquibase,mattbertolini/liquibase,dyk/liquibase,tjardo83/liquibase,Datical/liquibase,mattbertolini/liquibase,CoderPaulK/liquibase,pellcorp/liquibase,russ-p/liquibase,pellcorp/liquibase,cbotiza/liquibase,mbreslow/liquibase,dbmanul/dbmanul,jimmycd/liquibase,danielkec/liquibase,iherasymenko/liquibase,maberle/liquibase,vfpfafrf/liquibase,EVODelavega/liquibase,lazaronixon/liquibase,foxel/liquibase,instantdelay/liquibase,hbogaards/liquibase,pellcorp/liquibase,instantdelay/liquibase,adriens/liquibase,syncron/liquibase,hbogaards/liquibase,NSIT/liquibase,OpenCST/liquibase,dyk/liquibase,syncron/liquibase,cleiter/liquibase,mbreslow/liquibase,iherasymenko/liquibase,OpenCST/liquibase,hbogaards/liquibase,talklittle/liquibase,tjardo83/liquibase,mattbertolini/liquibase,mbreslow/liquibase,dprguard2000/liquibase,liquibase/liquibase,maberle/liquibase,cbotiza/liquibase,vfpfafrf/liquibase,balazs-zsoldos/liquibase,FreshGrade/liquibase,vast-engineering/liquibase,cbotiza/liquibase,FreshGrade/liquibase,cbotiza/liquibase,gquintana/liquibase,ivaylo5ev/liquibase,vbekiaris/liquibase,danielkec/liquibase,klopfdreh/liquibase,syncron/liquibase,vbekiaris/liquibase,vfpfafrf/liquibase,mortegac/liquibase,adriens/liquibase,lazaronixon/liquibase,iherasymenko/liquibase,fbiville/liquibase,C0mmi3/liquibase,lazaronixon/liquibase,CoderPaulK/liquibase,pellcorp/liquibase,evigeant/liquibase,mortegac/liquibase,mortegac/liquibase,FreshGrade/liquibase,liquibase/liquibase,vast-engineering/liquibase,dbmanul/dbmanul,talklittle/liquibase,balazs-zsoldos/liquibase,EVODelavega/liquibase,lazaronixon/liquibase,vbekiaris/liquibase,hbogaards/liquibase,jimmycd/liquibase,maberle/liquibase,evigeant/liquibase,dbmanul/dbmanul,FreshGrade/liquibase,adriens/liquibase,russ-p/liquibase,EVODelavega/liquibase,liquibase/liquibase,fossamagna/liquibase,gquintana/liquibase,OpenCST/liquibase,C0mmi3/liquibase,Datical/liquibase,jimmycd/liquibase,CoderPaulK/liquibase,foxel/liquibase,mwaylabs/liquibase,OpenCST/liquibase,dyk/liquibase,Willem1987/liquibase,gquintana/liquibase,mattbertolini/liquibase,cleiter/liquibase,klopfdreh/liquibase,EVODelavega/liquibase,foxel/liquibase,C0mmi3/liquibase,NSIT/liquibase,gquintana/liquibase,vbekiaris/liquibase,fbiville/liquibase,danielkec/liquibase,Datical/liquibase,Willem1987/liquibase,russ-p/liquibase,tjardo83/liquibase,jimmycd/liquibase,CoderPaulK/liquibase,Willem1987/liquibase,dbmanul/dbmanul,vast-engineering/liquibase,syncron/liquibase,evigeant/liquibase,vast-engineering/liquibase,tjardo83/liquibase,vfpfafrf/liquibase,foxel/liquibase,dprguard2000/liquibase,dyk/liquibase,fbiville/liquibase,klopfdreh/liquibase,cleiter/liquibase,talklittle/liquibase,instantdelay/liquibase,NSIT/liquibase,klopfdreh/liquibase,C0mmi3/liquibase,danielkec/liquibase,mwaylabs/liquibase,mwaylabs/liquibase,balazs-zsoldos/liquibase,Datical/liquibase,Willem1987/liquibase,maberle/liquibase,NSIT/liquibase,mbreslow/liquibase,mwaylabs/liquibase,russ-p/liquibase,fossamagna/liquibase,fbiville/liquibase | package liquibase.resource;
import liquibase.util.FileUtil;
import liquibase.util.StringUtils;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.*;
import java.util.*;
/**
* An implementation of {@link liquibase.resource.ResourceAccessor} that wraps a class loader.
*/
public class ClassLoaderResourceAccessor extends AbstractResourceAccessor {
private ClassLoader classLoader;
public ClassLoaderResourceAccessor() {
this.classLoader = getClass().getClassLoader();
init();
}
public ClassLoaderResourceAccessor(ClassLoader classLoader) {
this.classLoader = classLoader;
init();
}
@Override
public Set<InputStream> getResourcesAsStream(String path) throws IOException {
Enumeration<URL> resources = classLoader.getResources(path);
if (resources == null || !resources.hasMoreElements()) {
return null;
}
Set<String> seenUrls = new HashSet<String>();
Set<InputStream> returnSet = new HashSet<InputStream>();
while (resources.hasMoreElements()) {
URL url = resources.nextElement();
if (seenUrls.contains(url.toExternalForm())) {
continue;
}
seenUrls.add(url.toExternalForm());
InputStream resourceAsStream = url.openStream();
if (resourceAsStream != null) {
returnSet.add(resourceAsStream);
}
}
return returnSet;
}
@Override
public Set<String> list(String relativeTo, String path, boolean includeFiles, boolean includeDirectories, boolean recursive) throws IOException {
path = convertToPath(relativeTo, path);
URL fileUrl = classLoader.getResource(path);
if (fileUrl == null) {
return null;
}
if (!fileUrl.toExternalForm().startsWith("file:")) {
if (fileUrl.toExternalForm().startsWith("jar:file:")
|| fileUrl.toExternalForm().startsWith("wsjar:file:")
|| fileUrl.toExternalForm().startsWith("zip:")) {
String file = fileUrl.getFile();
String splitPath = file.split("!")[0];
if (splitPath.matches("file:\\/[A-Za-z]:\\/.*")) {
splitPath = splitPath.replaceFirst("file:\\/", "");
} else {
splitPath = splitPath.replaceFirst("file:", "");
}
splitPath = URLDecoder.decode(splitPath, "UTF-8");
File zipfile = new File(splitPath);
File zipFileDir = FileUtil.unzip(zipfile);
if (path.startsWith("classpath:")) {
path = path.replaceFirst("classpath:", "");
}
if (path.startsWith("classpath*:")) {
path = path.replaceFirst("classpath\\*:", "");
}
URI fileUri = new File(zipFileDir, path).toURI();
fileUrl = fileUri.toURL();
}
}
try {
File file = new File(fileUrl.toURI());
if (file.exists()) {
Set<String> returnSet = new HashSet<String>();
getContents(file, recursive, includeFiles, includeDirectories, path, returnSet);
return returnSet;
}
} catch (URISyntaxException e) {
//not a local file
} catch (IllegalArgumentException e) {
//not a local file
}
Enumeration<URL> resources = classLoader.getResources(path);
if (resources == null || !resources.hasMoreElements()) {
return null;
}
Set<String> returnSet = new HashSet<String>();
while (resources.hasMoreElements()) {
String url = resources.nextElement().toExternalForm();
url = url.replaceFirst("^\\Q"+path+"\\E", "");
returnSet.add(url);
}
return returnSet;
}
@Override
public ClassLoader toClassLoader() {
return classLoader;
}
@Override
public String toString() {
String description;
if (classLoader instanceof URLClassLoader) {
List<String> urls = new ArrayList<String>();
for (URL url : ((URLClassLoader) classLoader).getURLs()) {
urls.add(url.toExternalForm());
}
description = StringUtils.join(urls, ",");
} else {
description = classLoader.getClass().getName();
}
return getClass().getName()+"("+ description +")";
}
}
| liquibase-core/src/main/java/liquibase/resource/ClassLoaderResourceAccessor.java | package liquibase.resource;
import liquibase.util.FileUtil;
import liquibase.util.StringUtils;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.*;
import java.util.*;
/**
* An implementation of {@link liquibase.resource.ResourceAccessor} that wraps a class loader.
*/
public class ClassLoaderResourceAccessor extends AbstractResourceAccessor {
private ClassLoader classLoader;
public ClassLoaderResourceAccessor() {
this.classLoader = getClass().getClassLoader();
init();
}
public ClassLoaderResourceAccessor(ClassLoader classLoader) {
this.classLoader = classLoader;
init();
}
@Override
public Set<InputStream> getResourcesAsStream(String path) throws IOException {
Enumeration<URL> resources = classLoader.getResources(path);
if (resources == null || !resources.hasMoreElements()) {
return null;
}
Set<InputStream> returnSet = new HashSet<InputStream>();
while (resources.hasMoreElements()) {
URL url = resources.nextElement();
InputStream resourceAsStream = url.openStream();
if (resourceAsStream != null) {
returnSet.add(resourceAsStream);
}
}
return returnSet;
}
@Override
public Set<String> list(String relativeTo, String path, boolean includeFiles, boolean includeDirectories, boolean recursive) throws IOException {
path = convertToPath(relativeTo, path);
URL fileUrl = classLoader.getResource(path);
if (fileUrl == null) {
return null;
}
if (!fileUrl.toExternalForm().startsWith("file:")) {
if (fileUrl.toExternalForm().startsWith("jar:file:")
|| fileUrl.toExternalForm().startsWith("wsjar:file:")
|| fileUrl.toExternalForm().startsWith("zip:")) {
String file = fileUrl.getFile();
String splitPath = file.split("!")[0];
if (splitPath.matches("file:\\/[A-Za-z]:\\/.*")) {
splitPath = splitPath.replaceFirst("file:\\/", "");
} else {
splitPath = splitPath.replaceFirst("file:", "");
}
splitPath = URLDecoder.decode(splitPath, "UTF-8");
File zipfile = new File(splitPath);
File zipFileDir = FileUtil.unzip(zipfile);
if (path.startsWith("classpath:")) {
path = path.replaceFirst("classpath:", "");
}
if (path.startsWith("classpath*:")) {
path = path.replaceFirst("classpath\\*:", "");
}
URI fileUri = new File(zipFileDir, path).toURI();
fileUrl = fileUri.toURL();
}
}
try {
File file = new File(fileUrl.toURI());
if (file.exists()) {
Set<String> returnSet = new HashSet<String>();
getContents(file, recursive, includeFiles, includeDirectories, path, returnSet);
return returnSet;
}
} catch (URISyntaxException e) {
//not a local file
} catch (IllegalArgumentException e) {
//not a local file
}
Enumeration<URL> resources = classLoader.getResources(path);
if (resources == null || !resources.hasMoreElements()) {
return null;
}
Set<String> returnSet = new HashSet<String>();
while (resources.hasMoreElements()) {
String url = resources.nextElement().toExternalForm();
url = url.replaceFirst("^\\Q"+path+"\\E", "");
returnSet.add(url);
}
return returnSet;
}
@Override
public ClassLoader toClassLoader() {
return classLoader;
}
@Override
public String toString() {
String description;
if (classLoader instanceof URLClassLoader) {
List<String> urls = new ArrayList<String>();
for (URL url : ((URLClassLoader) classLoader).getURLs()) {
urls.add(url.toExternalForm());
}
description = StringUtils.join(urls, ",");
} else {
description = classLoader.getClass().getName();
}
return getClass().getName()+"("+ description +")";
}
}
| classloader roots can duplicate up on URLs
| liquibase-core/src/main/java/liquibase/resource/ClassLoaderResourceAccessor.java | classloader roots can duplicate up on URLs | <ide><path>iquibase-core/src/main/java/liquibase/resource/ClassLoaderResourceAccessor.java
<ide> if (resources == null || !resources.hasMoreElements()) {
<ide> return null;
<ide> }
<add> Set<String> seenUrls = new HashSet<String>();
<ide> Set<InputStream> returnSet = new HashSet<InputStream>();
<ide> while (resources.hasMoreElements()) {
<ide> URL url = resources.nextElement();
<add> if (seenUrls.contains(url.toExternalForm())) {
<add> continue;
<add> }
<add> seenUrls.add(url.toExternalForm());
<ide> InputStream resourceAsStream = url.openStream();
<ide> if (resourceAsStream != null) {
<ide> returnSet.add(resourceAsStream); |
|
Java | apache-2.0 | afef6f84f5e796ebfc9af86c233b6b8cde2f4319 | 0 | jruesga/rview,jruesga/rview,jruesga/rview | /*
* Copyright (C) 2016 Jorge Ruesga
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ruesga.rview.misc;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapShader;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.drawable.Drawable;
import android.support.annotation.ColorRes;
import android.support.v4.content.ContextCompat;
import android.support.v4.graphics.drawable.DrawableCompat;
import android.widget.ImageView;
import com.jakewharton.picasso.OkHttp3Downloader;
import com.ruesga.rview.R;
import com.ruesga.rview.gerrit.model.AccountInfo;
import com.squareup.picasso.Callback;
import com.squareup.picasso.Picasso;
import com.squareup.picasso.Transformation;
import java.io.File;
import java.util.List;
import okhttp3.Cache;
import okhttp3.OkHttpClient;
import okhttp3.Response;
public class PicassoHelper {
@SuppressLint("StaticFieldLeak")
private static Picasso sPicasso;
public static Picasso getPicassoClient(Context context) {
if (sPicasso == null) {
final File cacheDir = CacheHelper.getAccountCacheDir(context);
OkHttpClient client = new OkHttpClient.Builder()
.addNetworkInterceptor(chain -> {
Response originalResponse = chain.proceed(chain.request());
return CacheHelper.addCacheControl(originalResponse.newBuilder()).build();
})
.cache(new Cache(cacheDir, CacheHelper.MAX_DISK_CACHE))
.build();
OkHttp3Downloader downloader = new OkHttp3Downloader(client);
sPicasso = new Picasso.Builder(context.getApplicationContext())
.defaultBitmapConfig(Bitmap.Config.ARGB_8888)
.downloader(downloader)
.build();
}
return sPicasso;
}
public static Drawable getDefaultAvatar(Context context, @ColorRes int color) {
Drawable drawable = ContextCompat.getDrawable(context, R.drawable.ic_account_circle);
DrawableCompat.setTint(drawable, ContextCompat.getColor(context, color));
return drawable;
}
public static void bindAvatar(Context context, Picasso picasso, AccountInfo account,
ImageView view, Drawable placeholder) {
final List<String> avatarUrls = ModelHelper.getAvatarUrl(context, account);
loadWithFallbackUrls(picasso, view, placeholder, avatarUrls);
}
private static void loadWithFallbackUrls(final Picasso picasso, final ImageView view,
final Drawable placeholder, final List<String> urls) {
final String nextUrl;
synchronized (urls) {
nextUrl = urls.isEmpty() ? null : urls.get(0);
}
if (nextUrl != null) {
picasso.load(nextUrl)
.placeholder(placeholder)
.transform(new CircleTransform())
.into(view, new Callback() {
@Override
public void onSuccess() {
synchronized (urls) {
urls.clear();
urls.add(nextUrl);
}
}
@Override
public void onError() {
// Next url
synchronized (urls) {
if (urls.contains(nextUrl)) {
urls.remove(nextUrl);
}
}
loadWithFallbackUrls(picasso, view, placeholder, urls);
}
});
} else {
// Placeholder
view.setImageDrawable(placeholder);
}
}
// http://stackoverflow.com/questions/26112150/android-create-circular-image-with-picasso
private static class CircleTransform implements Transformation {
@Override
public Bitmap transform(Bitmap source) {
int size = Math.min(source.getWidth(), source.getHeight());
int x = (source.getWidth() - size) / 2;
int y = (source.getHeight() - size) / 2;
Bitmap squaredBitmap = Bitmap.createBitmap(source, x, y, size, size);
if (squaredBitmap != source) {
source.recycle();
}
Bitmap bitmap = Bitmap.createBitmap(size, size, source.getConfig());
Canvas canvas = new Canvas(bitmap);
Paint paint = new Paint();
BitmapShader shader = new BitmapShader(squaredBitmap,
BitmapShader.TileMode.CLAMP, BitmapShader.TileMode.CLAMP);
paint.setShader(shader);
paint.setAntiAlias(true);
float r = size / 2f;
canvas.drawCircle(r, r, r, paint);
squaredBitmap.recycle();
return bitmap;
}
@Override
public String key() {
return "circle";
}
}
}
| app/src/main/java/com/ruesga/rview/misc/PicassoHelper.java | /*
* Copyright (C) 2016 Jorge Ruesga
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ruesga.rview.misc;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapShader;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.drawable.Drawable;
import android.support.annotation.ColorRes;
import android.support.v4.content.ContextCompat;
import android.support.v4.graphics.drawable.DrawableCompat;
import android.widget.ImageView;
import com.jakewharton.picasso.OkHttp3Downloader;
import com.ruesga.rview.R;
import com.ruesga.rview.gerrit.model.AccountInfo;
import com.squareup.picasso.Callback;
import com.squareup.picasso.Picasso;
import com.squareup.picasso.Transformation;
import java.io.File;
import java.util.List;
import okhttp3.Cache;
import okhttp3.OkHttpClient;
import okhttp3.Response;
public class PicassoHelper {
@SuppressLint("StaticFieldLeak")
private static Picasso sPicasso;
public static Picasso getPicassoClient(Context context) {
if (sPicasso == null) {
final File cacheDir = CacheHelper.getAccountCacheDir(context);
OkHttpClient client = new OkHttpClient.Builder()
.addNetworkInterceptor(chain -> {
Response originalResponse = chain.proceed(chain.request());
return CacheHelper.addCacheControl(originalResponse.newBuilder()).build();
})
.cache(new Cache(cacheDir, CacheHelper.MAX_DISK_CACHE))
.build();
OkHttp3Downloader downloader = new OkHttp3Downloader(client);
sPicasso = new Picasso.Builder(context.getApplicationContext())
.defaultBitmapConfig(Bitmap.Config.ARGB_8888)
.downloader(downloader)
.build();
}
return sPicasso;
}
public static Drawable getDefaultAvatar(Context context, @ColorRes int color) {
Drawable drawable = ContextCompat.getDrawable(context, R.drawable.ic_account_circle);
DrawableCompat.setTint(drawable, ContextCompat.getColor(context, color));
return drawable;
}
public static void bindAvatar(Context context, Picasso picasso, AccountInfo account,
ImageView view, Drawable placeholder) {
final List<String> avatarUrls = ModelHelper.getAvatarUrl(context, account);
loadWithFallbackUrls(picasso, view, placeholder, avatarUrls);
}
private static void loadWithFallbackUrls(final Picasso picasso, final ImageView view,
final Drawable placeholder, final List<String> urls) {
if (!urls.isEmpty()) {
final String nextUrl = urls.get(0);
picasso.load(nextUrl)
.placeholder(placeholder)
.transform(new CircleTransform())
.into(view, new Callback() {
@Override
public void onSuccess() {
synchronized (urls) {
urls.clear();
urls.add(nextUrl);
}
}
@Override
public void onError() {
// Next url
synchronized (urls) {
if (urls.contains(nextUrl)) {
urls.remove(nextUrl);
}
}
loadWithFallbackUrls(picasso, view, placeholder, urls);
}
});
} else {
// Placeholder
view.setImageDrawable(placeholder);
}
}
// http://stackoverflow.com/questions/26112150/android-create-circular-image-with-picasso
private static class CircleTransform implements Transformation {
@Override
public Bitmap transform(Bitmap source) {
int size = Math.min(source.getWidth(), source.getHeight());
int x = (source.getWidth() - size) / 2;
int y = (source.getHeight() - size) / 2;
Bitmap squaredBitmap = Bitmap.createBitmap(source, x, y, size, size);
if (squaredBitmap != source) {
source.recycle();
}
Bitmap bitmap = Bitmap.createBitmap(size, size, source.getConfig());
Canvas canvas = new Canvas(bitmap);
Paint paint = new Paint();
BitmapShader shader = new BitmapShader(squaredBitmap,
BitmapShader.TileMode.CLAMP, BitmapShader.TileMode.CLAMP);
paint.setShader(shader);
paint.setAntiAlias(true);
float r = size / 2f;
canvas.drawCircle(r, r, r, paint);
squaredBitmap.recycle();
return bitmap;
}
@Override
public String key() {
return "circle";
}
}
}
| Synchronize avatar urls isempty check access as well
Signed-off-by: Jorge Ruesga <[email protected]>
| app/src/main/java/com/ruesga/rview/misc/PicassoHelper.java | Synchronize avatar urls isempty check access as well | <ide><path>pp/src/main/java/com/ruesga/rview/misc/PicassoHelper.java
<ide>
<ide> private static void loadWithFallbackUrls(final Picasso picasso, final ImageView view,
<ide> final Drawable placeholder, final List<String> urls) {
<del> if (!urls.isEmpty()) {
<del> final String nextUrl = urls.get(0);
<add> final String nextUrl;
<add> synchronized (urls) {
<add> nextUrl = urls.isEmpty() ? null : urls.get(0);
<add> }
<add> if (nextUrl != null) {
<ide> picasso.load(nextUrl)
<ide> .placeholder(placeholder)
<ide> .transform(new CircleTransform()) |
|
JavaScript | apache-2.0 | 9393d03992425e0564c96e96f23dc327f5180162 | 0 | jitsi/lib-jitsi-meet,jitsi/lib-jitsi-meet | import { getLogger } from '@jitsi/logger';
import { Interop } from '@jitsi/sdp-interop';
import transform from 'sdp-transform';
import * as CodecMimeType from '../../service/RTC/CodecMimeType';
import MediaDirection from '../../service/RTC/MediaDirection';
import * as MediaType from '../../service/RTC/MediaType';
import RTCEvents from '../../service/RTC/RTCEvents';
import * as SignalingEvents from '../../service/RTC/SignalingEvents';
import { getSourceNameForJitsiTrack } from '../../service/RTC/SignalingLayer';
import * as VideoType from '../../service/RTC/VideoType';
import { SS_DEFAULT_FRAME_RATE } from '../RTC/ScreenObtainer';
import browser from '../browser';
import FeatureFlags from '../flags/FeatureFlags';
import LocalSdpMunger from '../sdp/LocalSdpMunger';
import RtxModifier from '../sdp/RtxModifier';
import SDP from '../sdp/SDP';
import SDPUtil from '../sdp/SDPUtil';
import SdpConsistency from '../sdp/SdpConsistency';
import { SdpTransformWrap } from '../sdp/SdpTransformUtil';
import * as GlobalOnErrorHandler from '../util/GlobalOnErrorHandler';
import JitsiRemoteTrack from './JitsiRemoteTrack';
import RTC from './RTC';
import RTCUtils from './RTCUtils';
import {
HD_BITRATE,
HD_SCALE_FACTOR,
SIM_LAYER_RIDS,
TPCUtils
} from './TPCUtils';
// FIXME SDP tools should end up in some kind of util module
const logger = getLogger(__filename);
const DEGRADATION_PREFERENCE_CAMERA = 'maintain-framerate';
const DEGRADATION_PREFERENCE_DESKTOP = 'maintain-resolution';
/* eslint-disable max-params */
/**
* Creates new instance of 'TraceablePeerConnection'.
*
* @param {RTC} rtc the instance of <tt>RTC</tt> service
* @param {number} id the peer connection id assigned by the parent RTC module.
* @param {SignalingLayer} signalingLayer the signaling layer instance
* @param {object} pcConfig The {@code RTCConfiguration} to use for the WebRTC peer connection.
* @param {object} constraints WebRTC 'PeerConnection' constraints
* @param {boolean} isP2P indicates whether or not the new instance will be used in a peer to peer connection.
 * @param {object} options <tt>TraceablePeerConnection</tt> config options.
* @param {boolean} options.disableSimulcast if set to 'true' will disable the simulcast.
* @param {boolean} options.disableRtx if set to 'true' will disable the RTX.
 * @param {string} options.disabledCodec the mime type of the codec that should not be negotiated on the peerconnection.
* @param {string} options.preferredCodec the mime type of the codec that needs to be made the preferred codec for the
* peerconnection.
* @param {boolean} options.startSilent If set to 'true' no audio will be sent or received.
* @param {boolean} options.usesUnifiedPlan Indicates if the browser is running in unified plan mode.
*
* FIXME: initially the purpose of TraceablePeerConnection was to be able to
* debug the peer connection. Since many other responsibilities have been added
* it would make sense to extract a separate class from it and come up with
* a more suitable name.
*
* @constructor
*/
export default function TraceablePeerConnection(
rtc,
id,
signalingLayer,
pcConfig,
constraints,
isP2P,
options) {
/**
* Indicates whether or not this peer connection instance is actively
* sending/receiving audio media. When set to <tt>false</tt> the SDP audio
* media direction will be adjusted to 'inactive' in order to suspend
* the transmission.
* @type {boolean}
* @private
*/
this.audioTransferActive = !(options.startSilent === true);
/**
* The DTMF sender instance used to send DTMF tones.
*
* @type {RTCDTMFSender|undefined}
* @private
*/
this._dtmfSender = undefined;
/**
* @typedef {Object} TouchToneRequest
* @property {string} tones - The DTMF tones string as defined by
* {@code RTCDTMFSender.insertDTMF}, 'tones' argument.
* @property {number} duration - The amount of time in milliseconds that
* each DTMF should last.
     * @property {string} interToneGap - The length of time in milliseconds to
* wait between tones.
*/
/**
* TouchToneRequests which are waiting to be played. This queue is filled
* if there are touch tones currently being played.
*
* @type {Array<TouchToneRequest>}
* @private
*/
this._dtmfTonesQueue = [];
/**
* Indicates whether or not this peer connection instance is actively
* sending/receiving video media. When set to <tt>false</tt> the SDP video
* media direction will be adjusted to 'inactive' in order to suspend
* the transmission.
* @type {boolean}
* @private
*/
this.videoTransferActive = true;
/**
* The parent instance of RTC service which created this
     * <tt>TraceablePeerConnection</tt>.
* @type {RTC}
*/
this.rtc = rtc;
/**
* The peer connection identifier assigned by the RTC module.
* @type {number}
*/
this.id = id;
/**
* Indicates whether or not this instance is used in a peer to peer
* connection.
* @type {boolean}
*/
this.isP2P = isP2P;
// FIXME: We should support multiple streams per jid.
/**
* The map holds remote tracks associated with this peer connection.
* It maps user's JID to media type and remote track
* (one track per media type per user's JID).
* @type {Map<string, Map<MediaType, JitsiRemoteTrack>>}
*/
this.remoteTracks = new Map();
/**
* A map which stores local tracks mapped by {@link JitsiLocalTrack.rtcId}
* @type {Map<number, JitsiLocalTrack>}
*/
this.localTracks = new Map();
/**
     * Keeps track of the WebRTC <tt>MediaStream</tt>s that have been added to
* the underlying WebRTC PeerConnection.
* @type {Array}
* @private
*/
this._addedStreams = [];
/**
* @typedef {Object} TPCGroupInfo
* @property {string} semantics the SSRC groups semantics
* @property {Array<number>} ssrcs group's SSRCs in order where the first
* one is group's primary SSRC, the second one is secondary (RTX) and so
* on...
*/
/**
* @typedef {Object} TPCSSRCInfo
* @property {Array<number>} ssrcs an array which holds all track's SSRCs
     * @property {Array<TPCGroupInfo>} groups an array which stores all the track's SSRC
* groups
*/
/**
* Holds the info about local track's SSRCs mapped per their
* {@link JitsiLocalTrack.rtcId}
* @type {Map<number, TPCSSRCInfo>}
*/
this.localSSRCs = new Map();
/**
* The local ICE username fragment for this session.
*/
this.localUfrag = null;
/**
* The remote ICE username fragment for this session.
*/
this.remoteUfrag = null;
/**
* The signaling layer which operates this peer connection.
* @type {SignalingLayer}
*/
this.signalingLayer = signalingLayer;
// SignalingLayer listeners
this._peerVideoTypeChanged = this._peerVideoTypeChanged.bind(this);
this.signalingLayer.on(
SignalingEvents.PEER_VIDEO_TYPE_CHANGED,
this._peerVideoTypeChanged);
this._peerMutedChanged = this._peerMutedChanged.bind(this);
this.signalingLayer.on(
SignalingEvents.PEER_MUTED_CHANGED,
this._peerMutedChanged);
this.options = options;
// Make sure constraints is properly formatted in order to provide information about whether or not this
// connection is P2P to rtcstats.
const safeConstraints = constraints || {};
safeConstraints.optional = safeConstraints.optional || [];
// The `optional` parameter needs to be of type array, otherwise chrome will throw an error.
// Firefox and Safari just ignore it.
if (Array.isArray(safeConstraints.optional)) {
safeConstraints.optional.push({ rtcStatsSFUP2P: this.isP2P });
} else {
logger.warn('Optional param is not an array, rtcstats p2p data is omitted.');
}
this.peerconnection = new RTCUtils.RTCPeerConnectionType(pcConfig, safeConstraints);
this.tpcUtils = new TPCUtils(this);
this.updateLog = [];
this.stats = {};
this.statsinterval = null;
/**
* Flag used to indicate if simulcast is turned off and a cap of 500 Kbps is applied on screensharing.
*/
this._capScreenshareBitrate = this.options.capScreenshareBitrate;
/**
* Flag used to indicate if the browser is running in unified plan mode.
*/
this._usesUnifiedPlan = options.usesUnifiedPlan;
/**
* Flag used to indicate if RTCRtpTransceiver#setCodecPreferences is to be used instead of SDP
* munging for codec selection.
*/
this._usesTransceiverCodecPreferences = browser.supportsCodecPreferences() && this._usesUnifiedPlan;
this._usesTransceiverCodecPreferences
&& logger.info('Using RTCRtpTransceiver#setCodecPreferences for codec selection');
/**
* @type {number} The max number of stats to keep in this.stats. Limit to
* 300 values, i.e. 5 minutes; set to 0 to disable
*/
this.maxstats = options.maxstats;
this.interop = new Interop();
const Simulcast = require('@jitsi/sdp-simulcast');
this.simulcast = new Simulcast(
{
numOfLayers: SIM_LAYER_RIDS.length,
explodeRemoteSimulcast: false,
usesUnifiedPlan: this._usesUnifiedPlan
});
this.sdpConsistency = new SdpConsistency(this.toString());
/**
* Munges local SDP provided to the Jingle Session in order to prevent from
* sending SSRC updates on attach/detach and mute/unmute (for video).
* @type {LocalSdpMunger}
*/
this.localSdpMunger = new LocalSdpMunger(this, this.rtc.getLocalEndpointId());
/**
     * TraceablePeerConnection uses RTC's eventEmitter
* @type {EventEmitter}
*/
this.eventEmitter = rtc.eventEmitter;
this.rtxModifier = new RtxModifier();
/**
* The height constraint applied on the video sender. The default value is 2160 (4K) when layer suspension is
* explicitly disabled.
*/
this._senderVideoMaxHeight = 2160;
// override as desired
this.trace = (what, info) => {
logger.debug(what, info);
this.updateLog.push({
time: new Date(),
type: what,
value: info || ''
});
};
this.onicecandidate = null;
this.peerconnection.onicecandidate = event => {
this.trace(
'onicecandidate',
JSON.stringify(event.candidate, null, ' '));
if (this.onicecandidate !== null) {
this.onicecandidate(event);
}
};
// Use track events when browser is running in unified plan mode and stream events in plan-b mode.
if (this._usesUnifiedPlan) {
this.onTrack = evt => {
const stream = evt.streams[0];
this._remoteTrackAdded(stream, evt.track, evt.transceiver);
stream.addEventListener('removetrack', e => {
this._remoteTrackRemoved(stream, e.track);
});
};
this.peerconnection.addEventListener('track', this.onTrack);
} else {
this.peerconnection.onaddstream = event => this._remoteStreamAdded(event.stream);
this.peerconnection.onremovestream = event => this._remoteStreamRemoved(event.stream);
}
this.onsignalingstatechange = null;
this.peerconnection.onsignalingstatechange = event => {
this.trace('onsignalingstatechange', this.signalingState);
if (this.onsignalingstatechange !== null) {
this.onsignalingstatechange(event);
}
};
this.oniceconnectionstatechange = null;
this.peerconnection.oniceconnectionstatechange = event => {
this.trace('oniceconnectionstatechange', this.iceConnectionState);
if (this.oniceconnectionstatechange !== null) {
this.oniceconnectionstatechange(event);
}
};
this.onnegotiationneeded = null;
this.peerconnection.onnegotiationneeded = event => {
this.trace('onnegotiationneeded');
if (this.onnegotiationneeded !== null) {
this.onnegotiationneeded(event);
}
};
this.onconnectionstatechange = null;
this.peerconnection.onconnectionstatechange = event => {
this.trace('onconnectionstatechange', this.connectionState);
if (this.onconnectionstatechange !== null) {
this.onconnectionstatechange(event);
}
};
this.ondatachannel = null;
this.peerconnection.ondatachannel = event => {
this.trace('ondatachannel');
if (this.ondatachannel !== null) {
this.ondatachannel(event);
}
};
if (this.maxstats) {
this.statsinterval = window.setInterval(() => {
this.getStats().then(stats => {
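                // Support both the legacy stats shape, which exposes a result() array of reports,
                // and the spec-compliant RTCStatsReport, which is iterated directly below.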
if (typeof stats?.result === 'function') {
const results = stats.result();
for (let i = 0; i < results.length; ++i) {
const res = results[i];
res.names().forEach(name => {
this._processStat(res, name, res.stat(name));
});
}
} else {
stats.forEach(r => this._processStat(r, '', r));
}
});
}, 1000);
}
logger.info(`Create new ${this}`);
}
/* eslint-enable max-params */
/**
 * Processes a stat and adds it to the array of stats we store.
* @param report the current stats report.
* @param name the name of the report, if available
* @param statValue the value to add.
* @private
*/
TraceablePeerConnection.prototype._processStat
= function(report, name, statValue) {
const id = `${report.id}-${name}`;
let s = this.stats[id];
const now = new Date();
if (!s) {
this.stats[id] = s = {
startTime: now,
endTime: now,
values: [],
times: []
};
}
s.values.push(statValue);
s.times.push(now.getTime());
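        // Keep at most maxstats samples per stat by dropping the oldest value once the cap is exceeded.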
if (s.values.length > this.maxstats) {
s.values.shift();
s.times.shift();
}
s.endTime = now;
};
/**
* Returns a string representation of a SessionDescription object.
*/
const dumpSDP = function(description) {
if (typeof description === 'undefined' || description === null) {
return '';
}
return `type: ${description.type}\r\n${description.sdp}`;
};
/**
* Forwards the {@link peerconnection.iceConnectionState} state except that it
* will convert "completed" into "connected" where both mean that the ICE has
* succeeded and is up and running. We never see "completed" state for
* the JVB connection, but it started appearing for the P2P one. This method
 * allows adapting the old logic to this new situation.
* @return {string}
*/
TraceablePeerConnection.prototype.getConnectionState = function() {
const state = this.peerconnection.iceConnectionState;
if (state === 'completed') {
return 'connected';
}
return state;
};
/**
* Obtains the media direction for given {@link MediaType}. The method takes
* into account whether or not there are any local tracks for media and
* the {@link audioTransferActive} and {@link videoTransferActive} flags.
* @param {MediaType} mediaType
* @param {boolean} isAddOperation whether the direction is to be calculated after a source-add action.
 * @return {string} one of the SDP direction constants ('sendrecv', 'recvonly'
* etc.) which should be used when setting local description on the peer
* connection.
* @private
*/
TraceablePeerConnection.prototype.getDesiredMediaDirection = function(mediaType, isAddOperation = false) {
const hasLocalSource = this.hasAnyTracksOfType(mediaType);
if (this._usesUnifiedPlan) {
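        // In unified plan the direction depends on whether a local source exists and whether this
        // is evaluated for a source-add: sendrecv/sendonly when adding a source, recvonly/inactive
        // otherwise (the first option in each pair applies when a local source exists).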
return isAddOperation
? hasLocalSource ? MediaDirection.SENDRECV : MediaDirection.SENDONLY
: hasLocalSource ? MediaDirection.RECVONLY : MediaDirection.INACTIVE;
}
const mediaTransferActive = mediaType === MediaType.AUDIO ? this.audioTransferActive : this.videoTransferActive;
if (mediaTransferActive) {
return hasLocalSource ? MediaDirection.SENDRECV : MediaDirection.RECVONLY;
}
return MediaDirection.INACTIVE;
};
/**
* Returns the list of RTCRtpReceivers created for the source of the given media type associated with
* the set of remote endpoints specified.
* @param {Array<string>} endpoints list of the endpoints
* @param {string} mediaType 'audio' or 'video'
* @returns {Array<RTCRtpReceiver>} list of receivers created by the peerconnection.
*/
TraceablePeerConnection.prototype._getReceiversByEndpointIds = function(endpoints, mediaType) {
let remoteTracks = [];
let receivers = [];
for (const endpoint of endpoints) {
remoteTracks = remoteTracks.concat(this.getRemoteTracks(endpoint, mediaType));
}
// Get the ids of the MediaStreamTracks associated with each of these remote tracks.
const remoteTrackIds = remoteTracks.map(remote => remote.track?.id);
receivers = this.peerconnection.getReceivers()
.filter(receiver => receiver.track
&& receiver.track.kind === mediaType
&& remoteTrackIds.find(trackId => trackId === receiver.track.id));
return receivers;
};
/**
* Tells whether or not this TPC instance is using Simulcast.
* @return {boolean} <tt>true</tt> if simulcast is enabled and active or
* <tt>false</tt> if it's turned off.
*/
TraceablePeerConnection.prototype.isSimulcastOn = function() {
return !this.options.disableSimulcast;
};
/**
* Handles {@link SignalingEvents.PEER_VIDEO_TYPE_CHANGED}
* @param {string} endpointId the video owner's ID (MUC nickname)
* @param {VideoType} videoType the new value
* @private
*/
TraceablePeerConnection.prototype._peerVideoTypeChanged = function(
endpointId,
videoType) {
// Check if endpointId has a value to avoid action on random track
if (!endpointId) {
logger.error(`${this} No endpointID on peerVideoTypeChanged`);
return;
}
const videoTrack = this.getRemoteTracks(endpointId, MediaType.VIDEO);
if (videoTrack.length) {
// NOTE 1 track per media type is assumed
videoTrack[0]._setVideoType(videoType);
}
};
/**
* Handles remote track mute / unmute events.
* @param {string} endpointId the track owner's identifier (MUC nickname)
* @param {MediaType} mediaType "audio" or "video"
* @param {boolean} isMuted the new mute state
* @private
*/
TraceablePeerConnection.prototype._peerMutedChanged = function(
endpointId,
mediaType,
isMuted) {
    // Check if endpointId has a value to avoid acting on all remote tracks
if (!endpointId) {
logger.error(`${this} On peerMuteChanged - no endpoint ID`);
return;
}
const track = this.getRemoteTracks(endpointId, mediaType);
if (track.length) {
// NOTE 1 track per media type is assumed
track[0].setMute(isMuted);
}
};
/**
* Obtains audio levels of the remote audio tracks by getting the source information on the RTCRtpReceivers.
 * The information relevant to the ssrc is updated each time an RTP packet containing the ssrc is received.
* @param {Array<string>} speakerList list of endpoint ids for which audio levels are to be gathered.
* @returns {Object} containing ssrc and audio level information as a key-value pair.
*/
TraceablePeerConnection.prototype.getAudioLevels = function(speakerList = []) {
const audioLevels = {};
const audioReceivers = speakerList.length
? this._getReceiversByEndpointIds(speakerList, MediaType.AUDIO)
: this.peerconnection.getReceivers()
.filter(receiver => receiver.track && receiver.track.kind === MediaType.AUDIO && receiver.track.enabled);
audioReceivers.forEach(remote => {
const ssrc = remote.getSynchronizationSources();
if (ssrc && ssrc.length) {
// As per spec, this audiolevel is a value between 0..1 (linear), where 1.0
// represents 0 dBov, 0 represents silence, and 0.5 represents approximately
// 6 dBSPL change in the sound pressure level from 0 dBov.
// https://www.w3.org/TR/webrtc/#dom-rtcrtpcontributingsource-audiolevel
audioLevels[ssrc[0].source] = ssrc[0].audioLevel;
}
});
return audioLevels;
};
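// Illustrative usage (endpoint ids and values are made up):
//   const levels = tpc.getAudioLevels([ 'abcd1234', 'efgh5678' ]);
//   // => e.g. { 123456: 0.25, 654321: 0 } - each contributing ssrc mapped to its 0..1 audio level.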
/**
* Obtains local tracks for given {@link MediaType}. If the <tt>mediaType</tt>
* argument is omitted the list of all local tracks will be returned.
* @param {MediaType} [mediaType]
* @return {Array<JitsiLocalTrack>}
*/
TraceablePeerConnection.prototype.getLocalTracks = function(mediaType) {
let tracks = Array.from(this.localTracks.values());
if (mediaType !== undefined) {
tracks = tracks.filter(track => track.getType() === mediaType);
}
return tracks;
};
/**
* Retrieves the local video track.
*
* @returns {JitsiLocalTrack|undefined} - local video track.
*/
TraceablePeerConnection.prototype.getLocalVideoTrack = function() {
return this.getLocalTracks(MediaType.VIDEO)[0];
};
/**
* Checks whether or not this {@link TraceablePeerConnection} instance contains
* any local tracks for given <tt>mediaType</tt>.
* @param {MediaType} mediaType
* @return {boolean}
*/
TraceablePeerConnection.prototype.hasAnyTracksOfType = function(mediaType) {
if (!mediaType) {
throw new Error('"mediaType" is required');
}
return this.getLocalTracks(mediaType).length > 0;
};
/**
* Obtains all remote tracks currently known to this PeerConnection instance.
* @param {string} [endpointId] the track owner's identifier (MUC nickname)
* @param {MediaType} [mediaType] the remote tracks will be filtered
* by their media type if this argument is specified.
* @return {Array<JitsiRemoteTrack>}
*/
TraceablePeerConnection.prototype.getRemoteTracks = function(
endpointId,
mediaType) {
const remoteTracks = [];
const endpoints
= endpointId ? [ endpointId ] : this.remoteTracks.keys();
for (const endpoint of endpoints) {
const endpointTrackMap = this.remoteTracks.get(endpoint);
if (!endpointTrackMap) {
// Otherwise an empty Map() would have to be allocated above
// eslint-disable-next-line no-continue
continue;
}
for (const trackMediaType of endpointTrackMap.keys()) {
// per media type filtering
if (!mediaType || mediaType === trackMediaType) {
const mediaTrack = endpointTrackMap.get(trackMediaType);
if (mediaTrack) {
remoteTracks.push(mediaTrack);
}
}
}
}
return remoteTracks;
};
/**
* Parses the remote description and returns the sdp lines of the sources associated with a remote participant.
*
* @param {string} id Endpoint id of the remote participant.
* @returns {Array<string>} The sdp lines that have the ssrc information.
*/
TraceablePeerConnection.prototype.getRemoteSourceInfoByParticipant = function(id) {
const removeSsrcInfo = [];
const remoteTracks = this.getRemoteTracks(id);
if (!remoteTracks?.length) {
return removeSsrcInfo;
}
const primarySsrcs = remoteTracks.map(track => track.getSSRC());
const sdp = new SDP(this.remoteDescription.sdp);
primarySsrcs.forEach((ssrc, idx) => {
for (const media of sdp.media) {
let lines = '';
let ssrcLines = SDPUtil.findLines(media, `a=ssrc:${ssrc}`);
if (ssrcLines.length) {
if (!removeSsrcInfo[idx]) {
removeSsrcInfo[idx] = '';
}
// Check if there are any FID groups present for the primary ssrc.
const fidLines = SDPUtil.findLines(media, `a=ssrc-group:FID ${ssrc}`);
if (fidLines.length) {
const secondarySsrc = fidLines[0].split(' ')[2];
lines += `${fidLines[0]}\r\n`;
ssrcLines = ssrcLines.concat(SDPUtil.findLines(media, `a=ssrc:${secondarySsrc}`));
}
removeSsrcInfo[idx] += `${ssrcLines.join('\r\n')}\r\n`;
removeSsrcInfo[idx] += lines;
}
}
});
return removeSsrcInfo;
};
/**
* Returns the target bitrates configured for the local video source.
*
* @returns {Object}
*/
TraceablePeerConnection.prototype.getTargetVideoBitrates = function() {
const currentCodec = this.getConfiguredVideoCodec();
return this.tpcUtils.videoBitrates[currentCodec.toUpperCase()] || this.tpcUtils.videoBitrates;
};
/**
* Tries to find {@link JitsiTrack} for given SSRC number. It will search both
* local and remote tracks bound to this instance.
* @param {number} ssrc
* @return {JitsiTrack|null}
*/
TraceablePeerConnection.prototype.getTrackBySSRC = function(ssrc) {
if (typeof ssrc !== 'number') {
throw new Error(`SSRC ${ssrc} is not a number`);
}
for (const localTrack of this.localTracks.values()) {
if (this.getLocalSSRC(localTrack) === ssrc) {
return localTrack;
}
}
for (const remoteTrack of this.getRemoteTracks()) {
if (remoteTrack.getSSRC() === ssrc) {
return remoteTrack;
}
}
return null;
};
/**
* Tries to find SSRC number for given {@link JitsiTrack} id. It will search
* both local and remote tracks bound to this instance.
* @param {string} id
* @return {number|null}
*/
TraceablePeerConnection.prototype.getSsrcByTrackId = function(id) {
const findTrackById = track => track.getTrack().id === id;
const localTrack = this.getLocalTracks().find(findTrackById);
if (localTrack) {
return this.getLocalSSRC(localTrack);
}
const remoteTrack = this.getRemoteTracks().find(findTrackById);
if (remoteTrack) {
return remoteTrack.getSSRC();
}
return null;
};
/**
* Called when new remote MediaStream is added to the PeerConnection.
* @param {MediaStream} stream the WebRTC MediaStream for remote participant
*/
TraceablePeerConnection.prototype._remoteStreamAdded = function(stream) {
const streamId = RTC.getStreamID(stream);
if (!RTC.isUserStreamById(streamId)) {
logger.info(`${this} ignored remote 'stream added' event for non-user stream[id=${streamId}]`);
return;
}
// Bind 'addtrack'/'removetrack' event handlers
if (browser.isChromiumBased()) {
stream.onaddtrack = event => {
this._remoteTrackAdded(stream, event.track);
};
stream.onremovetrack = event => {
this._remoteTrackRemoved(stream, event.track);
};
}
// Call remoteTrackAdded for each track in the stream
const streamAudioTracks = stream.getAudioTracks();
for (const audioTrack of streamAudioTracks) {
this._remoteTrackAdded(stream, audioTrack);
}
const streamVideoTracks = stream.getVideoTracks();
for (const videoTrack of streamVideoTracks) {
this._remoteTrackAdded(stream, videoTrack);
}
};
/**
* Called on "track added" and "stream added" PeerConnection events (because we
 * handle streams on a per-track basis). Finds the owner and the SSRC for
* the track and passes that to ChatRoom for further processing.
* @param {MediaStream} stream the WebRTC MediaStream instance which is
* the parent of the track
* @param {MediaStreamTrack} track the WebRTC MediaStreamTrack added for remote
* participant.
* @param {RTCRtpTransceiver} transceiver the WebRTC transceiver that is created
* for the remote participant in unified plan.
*/
TraceablePeerConnection.prototype._remoteTrackAdded = function(stream, track, transceiver = null) {
const streamId = RTC.getStreamID(stream);
const mediaType = track.kind;
if (!this.isP2P && !RTC.isUserStreamById(streamId)) {
logger.info(`${this} ignored remote 'stream added' event for non-user stream[id=${streamId}]`);
return;
}
logger.info(`${this} adding remote track for stream[id=${streamId},type=${mediaType}]`);
// look up an associated JID for a stream id
if (!mediaType) {
GlobalOnErrorHandler.callErrorHandler(
new Error(
`MediaType undefined for remote track, stream id: ${streamId}`
));
// Abort
return;
}
const remoteSDP = this._usesUnifiedPlan
? new SDP(this.peerconnection.remoteDescription.sdp)
: new SDP(this.remoteDescription.sdp);
let mediaLines;
    // In unified plan mode, find the matching mline using 'mid' if it's available, otherwise use the
// 'msid' attribute of the stream.
if (this._usesUnifiedPlan) {
if (transceiver && transceiver.mid) {
const mid = transceiver.mid;
mediaLines = remoteSDP.media.filter(mls => SDPUtil.findLine(mls, `a=mid:${mid}`));
} else {
mediaLines = remoteSDP.media.filter(mls => {
const msid = SDPUtil.findLine(mls, 'a=msid:');
return typeof msid !== 'undefined' && streamId === msid.substring(7).split(' ')[0];
});
}
} else {
mediaLines = remoteSDP.media.filter(mls => mls.startsWith(`m=${mediaType}`));
}
if (!mediaLines.length) {
GlobalOnErrorHandler.callErrorHandler(
new Error(`No media lines found in remote SDP for remote stream[id=${streamId},type=${mediaType}]`));
// Abort
return;
}
let ssrcLines = SDPUtil.findLines(mediaLines[0], 'a=ssrc:');
ssrcLines
= ssrcLines.filter(line => line.indexOf(`msid:${streamId}`) !== -1);
if (!ssrcLines.length) {
GlobalOnErrorHandler.callErrorHandler(
new Error(`No SSRC lines found in remote SDP for remote stream[msid=${streamId},type=${mediaType}]`));
// Abort
return;
}
// FIXME the length of ssrcLines[0] not verified, but it will fail
// with global error handler anyway
const ssrcStr = ssrcLines[0].substring(7).split(' ')[0];
const trackSsrc = Number(ssrcStr);
const ownerEndpointId = this.signalingLayer.getSSRCOwner(trackSsrc);
if (isNaN(trackSsrc) || trackSsrc < 0) {
GlobalOnErrorHandler.callErrorHandler(
new Error(
`Invalid SSRC for remote stream[ssrc=${trackSsrc},id=${streamId},type=${mediaType}]`));
// Abort
return;
} else if (!ownerEndpointId) {
GlobalOnErrorHandler.callErrorHandler(
new Error(
`No SSRC owner known for remote stream[ssrc=${trackSsrc},id=${streamId},type=${mediaType}]`));
// Abort
return;
}
let sourceName;
if (FeatureFlags.isSourceNameSignalingEnabled()) {
sourceName = this.signalingLayer.getTrackSourceName(trackSsrc);
// If source name was not signaled, we'll generate one which allows testing signaling
        // when mixing legacy (mobile) clients with new clients.
if (!sourceName) {
sourceName = getSourceNameForJitsiTrack(ownerEndpointId, mediaType, 0);
}
}
// eslint-disable-next-line no-undef
logger.info(`${this} creating remote track[endpoint=${ownerEndpointId},ssrc=${trackSsrc},`
+ `type=${mediaType},sourceName=${sourceName}]`);
const peerMediaInfo
= this.signalingLayer.getPeerMediaInfo(ownerEndpointId, mediaType);
if (!peerMediaInfo) {
GlobalOnErrorHandler.callErrorHandler(
new Error(`${this}: no peer media info available for ${ownerEndpointId}`));
return;
}
const muted = peerMediaInfo.muted;
const videoType = peerMediaInfo.videoType; // can be undefined
// eslint-disable-next-line no-undef
this._createRemoteTrack(
ownerEndpointId, stream, track, mediaType, videoType, trackSsrc, muted, sourceName);
};
// FIXME cleanup params
/* eslint-disable max-params */
/**
* Initializes a new JitsiRemoteTrack instance with the data provided by
* the signaling layer and SDP.
*
* @param {string} ownerEndpointId the owner's endpoint ID (MUC nickname)
* @param {MediaStream} stream the WebRTC stream instance
* @param {MediaStreamTrack} track the WebRTC track instance
* @param {MediaType} mediaType the track's type of the media
* @param {VideoType} [videoType] the track's type of the video (if applicable)
* @param {number} ssrc the track's main SSRC number
* @param {boolean} muted the initial muted status
* @param {String} sourceName the track's source name
*/
TraceablePeerConnection.prototype._createRemoteTrack = function(
ownerEndpointId,
stream,
track,
mediaType,
videoType,
ssrc,
muted,
sourceName) {
let remoteTracksMap = this.remoteTracks.get(ownerEndpointId);
if (!remoteTracksMap) {
remoteTracksMap = new Map();
this.remoteTracks.set(ownerEndpointId, remoteTracksMap);
}
const existingTrack = remoteTracksMap.get(mediaType);
if (existingTrack && existingTrack.getTrack() === track) {
// Ignore duplicated event which can originate either from 'onStreamAdded' or 'onTrackAdded'.
logger.info(`${this} ignored duplicated track event for track[endpoint=${ownerEndpointId},type=${mediaType}]`);
return;
} else if (existingTrack) {
logger.error(`${this} received a second remote track for track[endpoint=${ownerEndpointId},type=${mediaType}]`
            + ' - deleting the existing track');
        // The existing track needs to be removed here. We can get here when Jicofo reverses the order of source-add
// and source-remove messages. Ideally, when a remote endpoint changes source, like switching devices, it sends
// a source-remove (for old ssrc) followed by a source-add (for new ssrc) and Jicofo then should forward these
// two messages to all the other endpoints in the conference in the same order. However, sometimes, these
// messages arrive at the client in the reverse order resulting in two remote tracks (of same media type) being
// created and in case of video, a black strip (that of the first track which has ended) appears over the live
// track obscuring it. Removing the existing track when that happens will fix this issue.
this._remoteTrackRemoved(existingTrack.getOriginalStream(), existingTrack.getTrack());
}
const remoteTrack
= new JitsiRemoteTrack(
this.rtc,
this.rtc.conference,
ownerEndpointId,
stream,
track,
mediaType,
videoType,
ssrc,
muted,
this.isP2P,
sourceName);
remoteTracksMap.set(mediaType, remoteTrack);
this.eventEmitter.emit(RTCEvents.REMOTE_TRACK_ADDED, remoteTrack, this);
};
/* eslint-enable max-params */
/**
* Handles remote stream removal.
* @param stream the WebRTC MediaStream object which is being removed from the
* PeerConnection
*/
TraceablePeerConnection.prototype._remoteStreamRemoved = function(stream) {
if (!RTC.isUserStream(stream)) {
const id = RTC.getStreamID(stream);
logger.info(`Ignored remote 'stream removed' event for stream[id=${id}]`);
return;
}
// Call remoteTrackRemoved for each track in the stream
const streamVideoTracks = stream.getVideoTracks();
for (const videoTrack of streamVideoTracks) {
this._remoteTrackRemoved(stream, videoTrack);
}
const streamAudioTracks = stream.getAudioTracks();
for (const audioTrack of streamAudioTracks) {
this._remoteTrackRemoved(stream, audioTrack);
}
};
/**
* Handles remote media track removal.
* @param {MediaStream} stream WebRTC MediaStream instance which is the parent
* of the track.
* @param {MediaStreamTrack} track the WebRTC MediaStreamTrack which has been
* removed from the PeerConnection.
*/
TraceablePeerConnection.prototype._remoteTrackRemoved = function(
stream,
track) {
const streamId = RTC.getStreamID(stream);
const trackId = track && RTC.getTrackID(track);
if (!RTC.isUserStreamById(streamId)) {
logger.info(`${this} ignored remote 'stream removed' event for non-user stream[id=${streamId}]`);
return;
}
logger.info(`${this} remote track removed stream[id=${streamId},trackId=${trackId}]`);
if (!streamId) {
GlobalOnErrorHandler.callErrorHandler(new Error(`${this} remote track removal failed - no stream ID`));
return;
}
if (!trackId) {
GlobalOnErrorHandler.callErrorHandler(new Error(`${this} remote track removal failed - no track ID`));
return;
}
if (!this._removeRemoteTrackById(streamId, trackId)) {
// NOTE this warning is always printed when user leaves the room,
// because we remove remote tracks manually on MUC member left event,
// before the SSRCs are removed by Jicofo. In most cases it is fine to
// ignore this warning, but still it's better to keep it printed for
// debugging purposes.
//
// We could change the behaviour to emit track removed only from here,
// but the order of the events will change and consuming apps could
// behave unexpectedly (the "user left" event would come before "track
// removed" events).
logger.warn(`${this} Removed track not found for stream[id=${streamId},trackId=${trackId}]`);
}
};
/**
 * Finds remote track by its stream and track ids.
* @param {string} streamId the media stream id as defined by the WebRTC
* @param {string} trackId the media track id as defined by the WebRTC
* @return {JitsiRemoteTrack|undefined} the track's instance or
* <tt>undefined</tt> if not found.
* @private
*/
TraceablePeerConnection.prototype._getRemoteTrackById = function(
streamId,
trackId) {
// .find will break the loop once the first match is found
for (const endpointTrackMap of this.remoteTracks.values()) {
for (const mediaTrack of endpointTrackMap.values()) {
// FIXME verify and try to use ===
/* eslint-disable eqeqeq */
if (mediaTrack.getStreamId() == streamId
&& mediaTrack.getTrackId() == trackId) {
return mediaTrack;
}
/* eslint-enable eqeqeq */
}
}
return undefined;
};
/**
* Removes all JitsiRemoteTracks associated with given MUC nickname
* (resource part of the JID). Returns array of removed tracks.
*
* @param {string} owner - The resource part of the MUC JID.
* @returns {JitsiRemoteTrack[]}
*/
TraceablePeerConnection.prototype.removeRemoteTracks = function(owner) {
const removedTracks = [];
const remoteTracksMap = this.remoteTracks.get(owner);
if (remoteTracksMap) {
const removedAudioTrack = remoteTracksMap.get(MediaType.AUDIO);
const removedVideoTrack = remoteTracksMap.get(MediaType.VIDEO);
removedAudioTrack && removedTracks.push(removedAudioTrack);
removedVideoTrack && removedTracks.push(removedVideoTrack);
this.remoteTracks.delete(owner);
}
logger.debug(`${this} removed remote tracks[endpoint=${owner},count=${removedTracks.length}`);
return removedTracks;
};
/**
* Removes and disposes given <tt>JitsiRemoteTrack</tt> instance. Emits
* {@link RTCEvents.REMOTE_TRACK_REMOVED}.
* @param {JitsiRemoteTrack} toBeRemoved
*/
TraceablePeerConnection.prototype._removeRemoteTrack = function(toBeRemoved) {
toBeRemoved.dispose();
const participantId = toBeRemoved.getParticipantId();
const remoteTracksMap = this.remoteTracks.get(participantId);
if (!remoteTracksMap) {
logger.error(`${this} removeRemoteTrack: no remote tracks map for endpoint=${participantId}`);
} else if (!remoteTracksMap.delete(toBeRemoved.getType())) {
logger.error(`${this} Failed to remove ${toBeRemoved} - type mapping messed up ?`);
}
this.eventEmitter.emit(RTCEvents.REMOTE_TRACK_REMOVED, toBeRemoved);
};
/**
* Removes and disposes <tt>JitsiRemoteTrack</tt> identified by given stream and
* track ids.
*
* @param {string} streamId the media stream id as defined by the WebRTC
* @param {string} trackId the media track id as defined by the WebRTC
* @returns {JitsiRemoteTrack|undefined} the track which has been removed or
* <tt>undefined</tt> if no track matching given stream and track ids was
* found.
*/
TraceablePeerConnection.prototype._removeRemoteTrackById = function(
streamId,
trackId) {
const toBeRemoved = this._getRemoteTrackById(streamId, trackId);
if (toBeRemoved) {
this._removeRemoteTrack(toBeRemoved);
}
return toBeRemoved;
};
/**
* Returns a map with keys msid/mediaType and <tt>TrackSSRCInfo</tt> values.
* @param {RTCSessionDescription} desc the local description.
* @return {Map<string,TrackSSRCInfo>}
*/
TraceablePeerConnection.prototype._extractSSRCMap = function(desc) {
/**
     * Track SSRC infos mapped by stream ID (msid) or mediaType (unified-plan)
* @type {Map<string,TrackSSRCInfo>}
*/
const ssrcMap = new Map();
/**
* Groups mapped by primary SSRC number
* @type {Map<number,Array<SSRCGroupInfo>>}
*/
const groupsMap = new Map();
if (typeof desc !== 'object' || desc === null
|| typeof desc.sdp !== 'string') {
logger.warn('An empty description was passed as an argument');
return ssrcMap;
}
const session = transform.parse(desc.sdp);
if (!Array.isArray(session.media)) {
return ssrcMap;
}
let media = session.media;
// For unified plan clients, only the first audio and video mlines will have ssrcs for the local sources.
// The rest of the m-lines are for the recv-only sources, one for each remote source.
if (this._usesUnifiedPlan) {
media = [];
[ MediaType.AUDIO, MediaType.VIDEO ].forEach(mediaType => {
const mLine = session.media.find(m => m.type === mediaType);
mLine && media.push(mLine);
});
}
for (const mLine of media) {
if (!Array.isArray(mLine.ssrcs)) {
continue; // eslint-disable-line no-continue
}
if (Array.isArray(mLine.ssrcGroups)) {
for (const group of mLine.ssrcGroups) {
if (typeof group.semantics !== 'undefined'
&& typeof group.ssrcs !== 'undefined') {
// Parse SSRCs and store as numbers
const groupSSRCs = group.ssrcs.split(' ').map(ssrcStr => parseInt(ssrcStr, 10));
const primarySSRC = groupSSRCs[0];
// Note that group.semantics is already present
group.ssrcs = groupSSRCs;
// eslint-disable-next-line max-depth
if (!groupsMap.has(primarySSRC)) {
groupsMap.set(primarySSRC, []);
}
groupsMap.get(primarySSRC).push(group);
}
}
}
let ssrcs = mLine.ssrcs;
// Filter the ssrcs with 'msid' attribute for plan-b clients and 'cname' for unified-plan clients.
ssrcs = this._usesUnifiedPlan
? ssrcs.filter(s => s.attribute === 'cname')
: ssrcs.filter(s => s.attribute === 'msid');
for (const ssrc of ssrcs) {
// Use the mediaType as key for the source map for unified plan clients since msids are not part of
// the standard and the unified plan SDPs do not have a proper msid attribute for the sources.
// Also the ssrcs for sources do not change for Unified plan clients since RTCRtpSender#replaceTrack is
// used for switching the tracks so it is safe to use the mediaType as the key for the TrackSSRCInfo map.
const key = this._usesUnifiedPlan ? mLine.type : ssrc.value;
const ssrcNumber = ssrc.id;
let ssrcInfo = ssrcMap.get(key);
if (!ssrcInfo) {
ssrcInfo = {
ssrcs: [],
groups: [],
msid: key
};
ssrcMap.set(key, ssrcInfo);
}
ssrcInfo.ssrcs.push(ssrcNumber);
if (groupsMap.has(ssrcNumber)) {
const ssrcGroups = groupsMap.get(ssrcNumber);
for (const group of ssrcGroups) {
ssrcInfo.groups.push(group);
}
}
}
}
return ssrcMap;
};
/**
* Takes a SessionDescription object and returns a "normalized" version.
* Currently it takes care of ordering the a=ssrc lines and denoting receive
* only SSRCs.
*/
const normalizePlanB = function(desc) {
if (typeof desc !== 'object' || desc === null
|| typeof desc.sdp !== 'string') {
logger.warn('An empty description was passed as an argument');
return desc;
}
// eslint-disable-next-line no-shadow
const transform = require('sdp-transform');
const session = transform.parse(desc.sdp);
if (typeof session !== 'undefined'
&& typeof session.media !== 'undefined'
&& Array.isArray(session.media)) {
session.media.forEach(mLine => {
// Chrome appears to be picky about the order in which a=ssrc lines
// are listed in an m-line when rtx is enabled (and thus there are
// a=ssrc-group lines with FID semantics). Specifically if we have
// "a=ssrc-group:FID S1 S2" and the "a=ssrc:S2" lines appear before
// the "a=ssrc:S1" lines, SRD fails.
// So, put SSRC which appear as the first SSRC in an FID ssrc-group
// first.
const firstSsrcs = [];
const newSsrcLines = [];
if (typeof mLine.ssrcGroups !== 'undefined'
&& Array.isArray(mLine.ssrcGroups)) {
mLine.ssrcGroups.forEach(group => {
if (typeof group.semantics !== 'undefined'
&& group.semantics === 'FID') {
if (typeof group.ssrcs !== 'undefined') {
firstSsrcs.push(Number(group.ssrcs.split(' ')[0]));
}
}
});
}
if (Array.isArray(mLine.ssrcs)) {
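                // First pass: pull the primary FID ssrcs to the front; second pass: append the
                // remaining ssrc lines in their original order.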
let i;
for (i = 0; i < mLine.ssrcs.length; i++) {
if (typeof mLine.ssrcs[i] === 'object'
&& typeof mLine.ssrcs[i].id !== 'undefined'
&& firstSsrcs.indexOf(mLine.ssrcs[i].id) >= 0) {
newSsrcLines.push(mLine.ssrcs[i]);
delete mLine.ssrcs[i];
}
}
for (i = 0; i < mLine.ssrcs.length; i++) {
if (typeof mLine.ssrcs[i] !== 'undefined') {
newSsrcLines.push(mLine.ssrcs[i]);
}
}
mLine.ssrcs = replaceDefaultUnifiedPlanMsid(newSsrcLines);
}
});
}
const resStr = transform.write(session);
return new RTCSessionDescription({
type: desc.type,
sdp: resStr
});
};
/**
* Unified plan differentiates a remote track not associated with a stream using
* the msid "-", which can incorrectly trigger an onaddstream event in plan-b.
* For jitsi, these tracks are actually receive-only ssrcs. To prevent
* onaddstream from firing, remove the ssrcs with msid "-" except the cname
* line. Normally the ssrcs are not used by the client, as the bridge controls
* media flow, but keep one reference to the ssrc for the p2p case.
*
* @param {Array<Object>} ssrcLines - The ssrc lines from a remote description.
* @private
* @returns {Array<Object>} ssrcLines with removed lines referencing msid "-".
*/
function replaceDefaultUnifiedPlanMsid(ssrcLines = []) {
if (!browser.isChrome() || !browser.isVersionGreaterThan(70)) {
return ssrcLines;
}
let filteredLines = [ ...ssrcLines ];
const problematicSsrcIds = ssrcLines.filter(ssrcLine =>
ssrcLine.attribute === 'mslabel' && ssrcLine.value === '-')
.map(ssrcLine => ssrcLine.id);
problematicSsrcIds.forEach(ssrcId => {
// Find the cname which is to be modified and left in.
const cnameLine = filteredLines.find(line =>
line.id === ssrcId && line.attribute === 'cname');
cnameLine.value = `${MediaDirection.RECVONLY}-${ssrcId}`;
// Remove all of lines for the ssrc.
filteredLines
= filteredLines.filter(line => line.id !== ssrcId);
// But re-add the cname line so there is a reference kept to the ssrc
// in the SDP.
filteredLines.push(cnameLine);
});
return filteredLines;
}
/**
* Makes sure that both audio and video directions are configured as 'sendrecv'.
* @param {Object} localDescription the SDP object as defined by WebRTC.
 * @param {object} options <tt>TraceablePeerConnection</tt> config options.
*/
const enforceSendRecv = function(localDescription, options) {
if (!localDescription) {
throw new Error('No local description passed in.');
}
const transformer = new SdpTransformWrap(localDescription.sdp);
const audioMedia = transformer.selectMedia(MediaType.AUDIO);
let changed = false;
if (audioMedia && audioMedia.direction !== MediaDirection.SENDRECV) {
if (options.startSilent) {
audioMedia.direction = MediaDirection.INACTIVE;
} else {
audioMedia.direction = MediaDirection.SENDRECV;
}
changed = true;
}
const videoMedia = transformer.selectMedia(MediaType.VIDEO);
if (videoMedia && videoMedia.direction !== MediaDirection.SENDRECV) {
videoMedia.direction = MediaDirection.SENDRECV;
changed = true;
}
if (changed) {
return new RTCSessionDescription({
type: localDescription.type,
sdp: transformer.toRawSDP()
});
}
return localDescription;
};
/**
*
* @param {JitsiLocalTrack} localTrack
*/
TraceablePeerConnection.prototype.getLocalSSRC = function(localTrack) {
const ssrcInfo = this._getSSRC(localTrack.rtcId);
return ssrcInfo && ssrcInfo.ssrcs[0];
};
/**
* When doing unified plan simulcast, we'll have a set of ssrcs with the
* same msid but no ssrc-group, since unified plan signals the simulcast
* group via the a=simulcast line. Unfortunately, Jicofo will complain
* if it sees ssrcs with matching msids but no ssrc-group, so we'll inject
* an ssrc-group line to make Jicofo happy.
* @param desc A session description object (with 'type' and 'sdp' fields)
* @return A session description object with its sdp field modified to
 * contain an injected ssrc-group for simulcast
*/
TraceablePeerConnection.prototype._injectSsrcGroupForUnifiedSimulcast
= function(desc) {
const sdp = transform.parse(desc.sdp);
const video = sdp.media.find(mline => mline.type === 'video');
// Check if the browser supports RTX, add only the primary ssrcs to the SIM group if that is the case.
video.ssrcGroups = video.ssrcGroups || [];
const fidGroups = video.ssrcGroups.filter(group => group.semantics === 'FID');
if (video.simulcast || video.simulcast_03) {
const ssrcs = [];
if (fidGroups && fidGroups.length) {
fidGroups.forEach(group => {
ssrcs.push(group.ssrcs.split(' ')[0]);
});
} else {
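                // No FID (RTX) groups present - fall back to the ssrcs that carry an msid attribute.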
video.ssrcs.forEach(ssrc => {
if (ssrc.attribute === 'msid') {
ssrcs.push(ssrc.id);
}
});
}
if (video.ssrcGroups.find(group => group.semantics === 'SIM')) {
// Group already exists, no need to do anything
return desc;
}
video.ssrcGroups.push({
semantics: 'SIM',
ssrcs: ssrcs.join(' ')
});
}
return new RTCSessionDescription({
type: desc.type,
sdp: transform.write(sdp)
});
};
/* eslint-disable-next-line vars-on-top */
const getters = {
signalingState() {
return this.peerconnection.signalingState;
},
iceConnectionState() {
return this.peerconnection.iceConnectionState;
},
connectionState() {
return this.peerconnection.connectionState;
},
localDescription() {
let desc = this.peerconnection.localDescription;
if (!desc) {
logger.debug(`${this} getLocalDescription no localDescription found`);
return {};
}
this.trace('getLocalDescription::preTransform', dumpSDP(desc));
// If the browser is running in unified plan mode and this is a jvb connection,
// transform the SDP to Plan B first.
if (this._usesUnifiedPlan && !this.isP2P) {
desc = this.interop.toPlanB(desc);
this.trace('getLocalDescription::postTransform (Plan B)',
dumpSDP(desc));
desc = this._injectSsrcGroupForUnifiedSimulcast(desc);
this.trace('getLocalDescription::postTransform (inject ssrc group)',
dumpSDP(desc));
} else if (!this._usesUnifiedPlan) {
if (browser.doesVideoMuteByStreamRemove()) {
desc = this.localSdpMunger.maybeAddMutedLocalVideoTracksToSDP(desc);
logger.debug(
'getLocalDescription::postTransform (munge local SDP)', desc);
}
// What comes out of this getter will be signalled over Jingle to
// the other peer, so we need to make sure the media direction is
// 'sendrecv' because we won't change the direction later and don't want
// the other peer to think we can't send or receive.
//
// Note that the description we set in chrome does have the accurate
// direction (e.g. 'recvonly'), since that is technically what is
// happening (check setLocalDescription impl).
desc = enforceSendRecv(desc, this.options);
}
// See the method's doc for more info about this transformation.
desc = this.localSdpMunger.transformStreamIdentifiers(desc);
return desc;
},
remoteDescription() {
let desc = this.peerconnection.remoteDescription;
if (!desc) {
logger.debug(`${this} getRemoteDescription no remoteDescription found`);
return {};
}
this.trace('getRemoteDescription::preTransform', dumpSDP(desc));
if (this._usesUnifiedPlan) {
if (this.isP2P) {
// Adjust the media direction for p2p based on whether a local source has been added.
desc = this._adjustRemoteMediaDirection(desc);
} else {
// If this is a jvb connection, transform the SDP to Plan B first.
desc = this.interop.toPlanB(desc);
this.trace('getRemoteDescription::postTransform (Plan B)', dumpSDP(desc));
}
}
return desc;
}
};
Object.keys(getters).forEach(prop => {
Object.defineProperty(
TraceablePeerConnection.prototype,
prop, {
get: getters[prop]
}
);
});
TraceablePeerConnection.prototype._getSSRC = function(rtcId) {
return this.localSSRCs.get(rtcId);
};
/**
* Checks if low fps screensharing is in progress.
*
* @private
* @returns {boolean} Returns true if 5 fps screensharing is in progress, false otherwise.
*/
TraceablePeerConnection.prototype.isSharingLowFpsScreen = function() {
return this._isSharingScreen() && this._capScreenshareBitrate;
};
/**
* Checks if screensharing is in progress.
*
* @returns {boolean} Returns true if a desktop track has been added to the
* peerconnection, false otherwise.
*/
TraceablePeerConnection.prototype._isSharingScreen = function() {
const track = this.getLocalVideoTrack();
return track && track.videoType === VideoType.DESKTOP;
};
/**
* Munges the order of the codecs in the SDP passed based on the preference
* set through config.js settings. All instances of the specified codec are
* moved up to the top of the list when it is preferred. The specified codec
* is deleted from the list if the configuration specifies that the codec be
* disabled.
* @param {RTCSessionDescription} description that needs to be munged.
* @returns {RTCSessionDescription} the munged description.
*/
TraceablePeerConnection.prototype._mungeCodecOrder = function(description) {
if (!this.codecPreference) {
return description;
}
const parsedSdp = transform.parse(description.sdp);
// Only the m-line that defines the source the browser will be sending should need to change.
// This is typically the first m-line with the matching media type.
const mLine = parsedSdp.media.find(m => m.type === this.codecPreference.mediaType);
if (!mLine) {
return description;
}
if (this.codecPreference.enable) {
SDPUtil.preferCodec(mLine, this.codecPreference.mimeType);
// Strip the high profile H264 codecs on mobile clients for p2p connection.
// High profile codecs give better quality at the expense of higher load which
// we do not want on mobile clients.
        // Jicofo offers only the baseline codec for the jvb connection.
// TODO - add check for mobile browsers once js-utils provides that check.
if (this.codecPreference.mimeType === CodecMimeType.H264 && browser.isReactNative() && this.isP2P) {
SDPUtil.stripCodec(mLine, this.codecPreference.mimeType, true /* high profile */);
}
// Set the max bitrate here on the SDP so that the configured max. bitrate is effective
// as soon as the browser switches to VP9.
if (this.codecPreference.mimeType === CodecMimeType.VP9
&& this.getConfiguredVideoCodec() === CodecMimeType.VP9) {
const bitrates = this.tpcUtils.videoBitrates.VP9 || this.tpcUtils.videoBitrates;
const hdBitrate = bitrates.high ? bitrates.high : HD_BITRATE;
const limit = Math.floor((this._isSharingScreen() ? HD_BITRATE : hdBitrate) / 1000);
// Use only the HD bitrate for now as there is no API available yet for configuring
// the bitrates on the individual SVC layers.
mLine.bandwidth = [ {
type: 'AS',
limit
} ];
} else {
// Clear the bandwidth limit in SDP when VP9 is no longer the preferred codec.
// This is needed on react native clients as react-native-webrtc returns the
// SDP that the application passed instead of returning the SDP off the native side.
// This line automatically gets cleared on web on every renegotiation.
mLine.bandwidth = undefined;
}
} else {
SDPUtil.stripCodec(mLine, this.codecPreference.mimeType);
}
return new RTCSessionDescription({
type: description.type,
sdp: transform.write(parsedSdp)
});
};
/**
* Checks if given track belongs to this peerconnection instance.
*
* @param {JitsiLocalTrack|JitsiRemoteTrack} track - The track to be checked.
* @returns {boolean}
*/
TraceablePeerConnection.prototype.containsTrack = function(track) {
if (track.isLocal()) {
return this.localTracks.has(track.rtcId);
}
const participantId = track.getParticipantId();
const remoteTracksMap = this.remoteTracks.get(participantId);
return Boolean(remoteTracksMap && remoteTracksMap.get(track.getType()) === track);
};
/**
* Add {@link JitsiLocalTrack} to this TPC.
* @param {JitsiLocalTrack} track
* @param {boolean} isInitiator indicates if the endpoint is the offerer.
* @returns {Promise<void>} - resolved when done.
*/
TraceablePeerConnection.prototype.addTrack = function(track, isInitiator = false) {
const rtcId = track.rtcId;
logger.info(`${this} adding ${track}`);
if (this.localTracks.has(rtcId)) {
return Promise.reject(new Error(`${track} is already in ${this}`));
}
this.localTracks.set(rtcId, track);
const webrtcStream = track.getOriginalStream();
if (this._usesUnifiedPlan) {
logger.debug(`${this} TPC.addTrack using unified plan`);
if (webrtcStream) {
try {
this.tpcUtils.addTrack(track, isInitiator);
} catch (error) {
logger.error(`${this} Adding track=${track} failed: ${error?.message}`);
return Promise.reject(error);
}
}
} else {
// Use addStream API for the plan-b case.
if (webrtcStream) {
this._addStream(webrtcStream);
// It's not ok for a track to not have a WebRTC stream if:
} else if (!browser.doesVideoMuteByStreamRemove()
|| track.isAudioTrack()
|| (track.isVideoTrack() && !track.isMuted())) {
return Promise.reject(new Error(`${this} no WebRTC stream for track=${track}`));
}
        // Muted video tracks do not have a WebRTC stream
if (browser.doesVideoMuteByStreamRemove() && track.isVideoTrack() && track.isMuted()) {
const ssrcInfo = this.generateNewStreamSSRCInfo(track);
this.sdpConsistency.setPrimarySsrc(ssrcInfo.ssrcs[0]);
const simGroup
= ssrcInfo.groups.find(groupInfo => groupInfo.semantics === 'SIM');
if (simGroup) {
this.simulcast.setSsrcCache(simGroup.ssrcs);
}
const fidGroups
= ssrcInfo.groups.filter(
groupInfo => groupInfo.semantics === 'FID');
if (fidGroups) {
const rtxSsrcMapping = new Map();
fidGroups.forEach(fidGroup => {
const primarySsrc = fidGroup.ssrcs[0];
const rtxSsrc = fidGroup.ssrcs[1];
rtxSsrcMapping.set(primarySsrc, rtxSsrc);
});
this.rtxModifier.setSsrcCache(rtxSsrcMapping);
}
}
}
let promiseChain = Promise.resolve();
// On Firefox, the encodings have to be configured on the sender only after the transceiver is created.
if (browser.isFirefox()) {
promiseChain = promiseChain.then(() => webrtcStream && this.tpcUtils.setEncodings(track));
}
return promiseChain;
};
/**
* Adds local track as part of the unmute operation.
* @param {JitsiLocalTrack} track the track to be added as part of the unmute operation.
*
* @return {Promise<boolean>} Promise that resolves to true if the underlying PeerConnection's
* state has changed and renegotiation is required, false if no renegotiation is needed or
* Promise is rejected when something goes wrong.
*/
TraceablePeerConnection.prototype.addTrackUnmute = function(track) {
logger.info(`${this} Adding track=${track} as unmute`);
if (!this._assertTrackBelongs('addTrackUnmute', track)) {
// Abort
return Promise.reject('Track not found on the peerconnection');
}
const webRtcStream = track.getOriginalStream();
if (!webRtcStream) {
logger.error(`${this} Unable to add track=${track} as unmute - no WebRTC stream`);
return Promise.reject('Stream not found');
}
if (this._usesUnifiedPlan) {
return this.tpcUtils.replaceTrack(null, track).then(() => this.isP2P);
}
this._addStream(webRtcStream);
return Promise.resolve(true);
};
/**
* Adds WebRTC media stream to the underlying PeerConnection
* @param {MediaStream} mediaStream
* @private
*/
TraceablePeerConnection.prototype._addStream = function(mediaStream) {
this.peerconnection.addStream(mediaStream);
this._addedStreams.push(mediaStream);
};
/**
 * Removes WebRTC media stream from the underlying PeerConnection
* @param {MediaStream} mediaStream
*/
TraceablePeerConnection.prototype._removeStream = function(mediaStream) {
this.peerconnection.removeStream(mediaStream);
this._addedStreams
= this._addedStreams.filter(stream => stream !== mediaStream);
};
/**
* This method when called will check if given <tt>localTrack</tt> belongs to
* this TPC (that it has been previously added using {@link addTrack}). If the
 * track does not belong, an error message will be logged.
* @param {string} methodName the method name that will be logged in an error
* message
* @param {JitsiLocalTrack} localTrack
* @return {boolean} <tt>true</tt> if given local track belongs to this TPC or
* <tt>false</tt> otherwise.
* @private
*/
TraceablePeerConnection.prototype._assertTrackBelongs = function(
methodName,
localTrack) {
const doesBelong = this.localTracks.has(localTrack?.rtcId);
if (!doesBelong) {
logger.error(`${this} ${methodName}: track=${localTrack} does not belong to pc`);
}
return doesBelong;
};
/**
* Returns the codec that is configured on the client as the preferred video codec.
* This takes into account the current order of codecs in the local description sdp.
*
* @returns {CodecMimeType} The codec that is set as the preferred codec to receive
* video in the local SDP.
*/
TraceablePeerConnection.prototype.getConfiguredVideoCodec = function() {
const sdp = this.peerconnection.localDescription?.sdp;
const defaultCodec = CodecMimeType.VP8;
if (!sdp) {
return defaultCodec;
}
const parsedSdp = transform.parse(sdp);
const mLine = parsedSdp.media.find(m => m.type === MediaType.VIDEO);
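    // After codec munging, the first codec listed on the video m-line reflects the current preference.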
const codec = mLine.rtp[0].codec;
if (codec) {
return Object.values(CodecMimeType).find(value => value === codec.toLowerCase());
}
return defaultCodec;
};
/**
* Enables or disables simulcast for screenshare based on the frame rate requested for desktop track capture.
*
* @param {number} maxFps framerate to be used for desktop track capture.
*/
TraceablePeerConnection.prototype.setDesktopSharingFrameRate = function(maxFps) {
const lowFps = maxFps <= SS_DEFAULT_FRAME_RATE;
this._capScreenshareBitrate = this.isSimulcastOn() && lowFps;
};
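/*
 * Sketch of the cap behaviour: the screenshare bitrate cap is applied only
 * when simulcast is on and the requested capture frame rate does not exceed
 * SS_DEFAULT_FRAME_RATE.
 *
 *     tpc.setDesktopSharingFrameRate(5);  // low-fps share, cap may be applied
 *     tpc.setDesktopSharingFrameRate(30); // high-fps share, cap is disabled
 */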
/**
* Sets the codec preference on the peerconnection. The codec preference goes into effect when
* the next renegotiation happens.
*
* @param {CodecMimeType} preferredCodec the preferred codec.
* @param {CodecMimeType} disabledCodec the codec that needs to be disabled.
* @returns {void}
*/
TraceablePeerConnection.prototype.setVideoCodecs = function(preferredCodec = null, disabledCodec = null) {
// If both enable and disable are set, disable settings will prevail.
const enable = disabledCodec === null;
const mimeType = disabledCodec ? disabledCodec : preferredCodec;
if (this.codecPreference && (preferredCodec || disabledCodec)) {
this.codecPreference.enable = enable;
this.codecPreference.mimeType = mimeType;
} else if (preferredCodec || disabledCodec) {
this.codecPreference = {
enable,
mediaType: MediaType.VIDEO,
mimeType
};
} else {
logger.warn(`${this} Invalid codec settings[preferred=${preferredCodec},disabled=${disabledCodec}],
at least one value is needed`);
}
};
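/*
 * Usage sketch for the two supported call shapes, assuming `tpc` is an
 * instance of this class:
 *
 *     // Make H.264 the preferred codec for the next renegotiation.
 *     tpc.setVideoCodecs(CodecMimeType.H264, null);
 *
 *     // Disable VP9; when both values are passed, the disable setting
 *     // prevails as noted above.
 *     tpc.setVideoCodecs(CodecMimeType.H264, CodecMimeType.VP9);
 */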
/**
* Tells if the given WebRTC <tt>MediaStream</tt> has been added to
* the underlying WebRTC PeerConnection.
* @param {MediaStream} mediaStream
* @returns {boolean}
*/
TraceablePeerConnection.prototype.isMediaStreamInPc = function(mediaStream) {
return this._addedStreams.indexOf(mediaStream) > -1;
};
/**
* Remove local track from this TPC.
* @param {JitsiLocalTrack} localTrack the track to be removed from this TPC.
*
 * FIXME It should probably return a boolean just like {@link removeTrackMute}
 * does. The same applies to addTrack.
*/
TraceablePeerConnection.prototype.removeTrack = function(localTrack) {
const webRtcStream = localTrack.getOriginalStream();
this.trace(
'removeStream',
localTrack.rtcId, webRtcStream ? webRtcStream.id : undefined);
if (!this._assertTrackBelongs('removeStream', localTrack)) {
// Abort - nothing to be done here
return;
}
this.localTracks.delete(localTrack.rtcId);
this.localSSRCs.delete(localTrack.rtcId);
if (webRtcStream) {
this.peerconnection.removeStream(webRtcStream);
}
};
/**
* Returns the sender corresponding to the given media type.
 * @param {MediaType} mediaType - The media type 'audio' or 'video' to be used for the search.
 * @returns {RTCRtpSender|undefined} - The found sender or undefined if no sender
* was found.
*/
TraceablePeerConnection.prototype.findSenderByKind = function(mediaType) {
return this.peerconnection.getSenders().find(s => s.track && s.track.kind === mediaType);
};
/**
* Returns the receiver corresponding to the given MediaStreamTrack.
*
 * @param {MediaStreamTrack} track - The media stream track used for the search.
* @returns {RTCRtpReceiver|undefined} - The found receiver or undefined if no receiver
* was found.
*/
TraceablePeerConnection.prototype.findReceiverForTrack = function(track) {
return this.peerconnection.getReceivers().find(r => r.track === track);
};
/**
* Returns the sender corresponding to the given MediaStreamTrack.
*
 * @param {MediaStreamTrack} track - The media stream track used for the search.
* @returns {RTCRtpSender|undefined} - The found sender or undefined if no sender
* was found.
*/
TraceablePeerConnection.prototype.findSenderForTrack = function(track) {
return this.peerconnection.getSenders().find(s => s.track === track);
};
/**
* Replaces <tt>oldTrack</tt> with <tt>newTrack</tt> from the peer connection.
* Either <tt>oldTrack</tt> or <tt>newTrack</tt> can be null; replacing a valid
* <tt>oldTrack</tt> with a null <tt>newTrack</tt> effectively just removes
* <tt>oldTrack</tt>
*
* @param {JitsiLocalTrack|null} oldTrack - The current track in use to be replaced on the pc.
* @param {JitsiLocalTrack|null} newTrack - The new track to be used.
*
* @returns {Promise<boolean>} - If the promise resolves with true, renegotiation will be needed.
* Otherwise no renegotiation is needed.
*/
TraceablePeerConnection.prototype.replaceTrack = function(oldTrack, newTrack) {
if (!(oldTrack || newTrack)) {
logger.info(`${this} replaceTrack called with no new track and no old track`);
return Promise.resolve();
}
// If a track is being added to the peerconnection for the first time, we want the source signaling to be sent to
// Jicofo before the mute state is sent over presence. Therefore, trigger a renegotiation in this case. If we
// rely on "negotiationneeded" fired by the browser to signal new ssrcs, the mute state in presence will be sent
// before the source signaling which is undesirable.
const negotiationNeeded = Boolean(!oldTrack || !this.localTracks.has(oldTrack?.rtcId));
if (this._usesUnifiedPlan) {
logger.debug(`${this} TPC.replaceTrack using unified plan`);
const mediaType = newTrack?.getType() ?? oldTrack?.getType();
const stream = newTrack?.getOriginalStream();
const promise = newTrack && !stream
// Ignore cases when the track is replaced while the device is in a muted state.
// The track will be replaced again on the peerconnection when the user unmutes.
? Promise.resolve()
: this.tpcUtils.replaceTrack(oldTrack, newTrack);
const transceiver = this.tpcUtils.findTransceiver(mediaType, oldTrack);
return promise
.then(() => {
oldTrack && this.localTracks.delete(oldTrack.rtcId);
newTrack && this.localTracks.set(newTrack.rtcId, newTrack);
if (transceiver) {
// Set the transceiver direction.
transceiver.direction = newTrack ? MediaDirection.SENDRECV : MediaDirection.RECVONLY;
}
// Avoid configuring the encodings on Chromium/Safari until simulcast is configured
// for the newly added track using SDP munging which happens during the renegotiation.
const configureEncodingsPromise = browser.usesSdpMungingForSimulcast() || !newTrack
? Promise.resolve()
: this.tpcUtils.setEncodings(newTrack);
// Renegotiate only in the case of P2P. We rely on 'negotiationneeded' to be fired for JVB.
return configureEncodingsPromise.then(() => this.isP2P || negotiationNeeded);
});
}
logger.debug(`${this} TPC.replaceTrack using plan B`);
let promiseChain = Promise.resolve();
if (oldTrack) {
this.removeTrack(oldTrack);
}
if (newTrack) {
promiseChain = this.addTrack(newTrack);
}
return promiseChain.then(() => true);
};
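/*
 * Camera-to-desktop switch sketch, assuming both arguments are
 * JitsiLocalTracks that belong to the same conference and `renegotiate` is a
 * hypothetical helper of the owning session:
 *
 *     tpc.replaceTrack(cameraTrack, desktopTrack)
 *         .then(renegotiationNeeded => {
 *             // true for plan B and for P2P/new sources in unified plan;
 *             // otherwise the browser's 'negotiationneeded' event is relied on.
 *             renegotiationNeeded && renegotiate();
 *         });
 */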
/**
* Removes local track as part of the mute operation.
* @param {JitsiLocalTrack} localTrack the local track to be remove as part of
* the mute operation.
* @return {Promise<boolean>} Promise that resolves to true if the underlying PeerConnection's
* state has changed and renegotiation is required, false if no renegotiation is needed or
* Promise is rejected when something goes wrong.
*/
TraceablePeerConnection.prototype.removeTrackMute = function(localTrack) {
const webRtcStream = localTrack.getOriginalStream();
this.trace(
'removeStreamMute',
localTrack.rtcId, webRtcStream ? webRtcStream.id : null);
if (!this._assertTrackBelongs('removeStreamMute', localTrack)) {
// Abort - nothing to be done here
return Promise.reject('Track not found in the peerconnection');
}
if (this._usesUnifiedPlan) {
return this.tpcUtils.replaceTrack(localTrack, null);
}
if (webRtcStream) {
logger.info(`${this} Removing track=${localTrack} as mute`);
this._removeStream(webRtcStream);
return Promise.resolve(true);
}
logger.error(`${this} removeStreamMute - no WebRTC stream for track=${localTrack}`);
return Promise.reject('Stream not found');
};
TraceablePeerConnection.prototype.createDataChannel = function(label, opts) {
this.trace('createDataChannel', label, opts);
return this.peerconnection.createDataChannel(label, opts);
};
/**
* Ensures that the simulcast ssrc-group appears after any other ssrc-groups
* in the SDP so that simulcast is properly activated.
*
* @param {Object} localSdp the WebRTC session description instance for
* the local description.
* @private
*/
TraceablePeerConnection.prototype._ensureSimulcastGroupIsLast = function(localSdp) {
let sdpStr = localSdp.sdp;
const videoStartIndex = sdpStr.indexOf('m=video');
const simStartIndex = sdpStr.indexOf('a=ssrc-group:SIM', videoStartIndex);
let otherStartIndex = sdpStr.lastIndexOf('a=ssrc-group');
if (simStartIndex === -1
|| otherStartIndex === -1
|| otherStartIndex === simStartIndex) {
return localSdp;
}
const simEndIndex = sdpStr.indexOf('\r\n', simStartIndex);
const simStr = sdpStr.substring(simStartIndex, simEndIndex + 2);
sdpStr = sdpStr.replace(simStr, '');
otherStartIndex = sdpStr.lastIndexOf('a=ssrc-group');
const otherEndIndex = sdpStr.indexOf('\r\n', otherStartIndex);
const sdpHead = sdpStr.slice(0, otherEndIndex);
const simStrTrimmed = simStr.trim();
const sdpTail = sdpStr.slice(otherEndIndex);
sdpStr = `${sdpHead}\r\n${simStrTrimmed}${sdpTail}`;
return new RTCSessionDescription({
type: localSdp.type,
sdp: sdpStr
});
};
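/*
 * Effect sketch on a trimmed video m-line (ssrc values are made up): the SIM
 * group is moved below the other ssrc-groups.
 *
 *     Before:                          After:
 *     a=ssrc-group:SIM 1 2 3           a=ssrc-group:FID 1 4
 *     a=ssrc-group:FID 1 4             a=ssrc-group:FID 2 5
 *     a=ssrc-group:FID 2 5             a=ssrc-group:SIM 1 2 3
 */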
/**
* Will adjust audio and video media direction in the given SDP object to
* reflect the current status of the {@link audioTransferActive} and
* {@link videoTransferActive} flags.
* @param {RTCSessionDescription} localDescription the WebRTC session description instance for
* the local description.
* @private
*/
TraceablePeerConnection.prototype._adjustLocalMediaDirection = function(localDescription) {
const transformer = new SdpTransformWrap(localDescription.sdp);
let modifiedDirection = false;
const audioMedia = transformer.selectMedia(MediaType.AUDIO);
if (audioMedia) {
const desiredAudioDirection = this.getDesiredMediaDirection(MediaType.AUDIO);
if (audioMedia.direction !== desiredAudioDirection) {
audioMedia.direction = desiredAudioDirection;
logger.info(`${this} Adjusted local audio direction to ${desiredAudioDirection}`);
modifiedDirection = true;
}
} else {
logger.warn(`${this} No "audio" media found in the local description`);
}
const videoMedia = transformer.selectMedia(MediaType.VIDEO);
if (videoMedia) {
const desiredVideoDirection = this.getDesiredMediaDirection(MediaType.VIDEO);
if (videoMedia.direction !== desiredVideoDirection) {
videoMedia.direction = desiredVideoDirection;
logger.info(`${this} Adjusted local video direction to ${desiredVideoDirection}`);
modifiedDirection = true;
}
} else {
logger.warn(`${this} No "video" media found in the local description`);
}
if (modifiedDirection) {
return new RTCSessionDescription({
type: localDescription.type,
sdp: transformer.toRawSDP()
});
}
return localDescription;
};
/**
* Adjusts the media direction on the remote description based on availability of local and remote sources in a p2p
* media connection.
*
* @param {RTCSessionDescription} remoteDescription the WebRTC session description instance for the remote description.
* @returns the transformed remoteDescription.
* @private
*/
TraceablePeerConnection.prototype._adjustRemoteMediaDirection = function(remoteDescription) {
const transformer = new SdpTransformWrap(remoteDescription.sdp);
[ MediaType.AUDIO, MediaType.VIDEO ].forEach(mediaType => {
const media = transformer.selectMedia(mediaType);
const hasLocalSource = this.hasAnyTracksOfType(mediaType);
const hasRemoteSource = this.getRemoteTracks(null, mediaType).length > 0;
media.direction = hasLocalSource && hasRemoteSource
? MediaDirection.SENDRECV
: hasLocalSource
? MediaDirection.RECVONLY
: hasRemoteSource ? MediaDirection.SENDONLY : MediaDirection.INACTIVE;
});
return new RTCSessionDescription({
type: remoteDescription.type,
sdp: transformer.toRawSDP()
});
};
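/*
 * Resulting direction per media type, derived from the ternary above:
 *
 *     local source | remote source | direction
 *     -------------+---------------+-----------
 *     yes          | yes           | sendrecv
 *     yes          | no            | recvonly
 *     no           | yes           | sendonly
 *     no           | no            | inactive
 */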
/**
* Munges the stereo flag as well as the opusMaxAverageBitrate in the SDP, based
* on values set through config.js, if present.
*
* @param {RTCSessionDescription} description that needs to be munged.
* @returns {RTCSessionDescription} the munged description.
*/
TraceablePeerConnection.prototype._mungeOpus = function(description) {
const { audioQuality } = this.options;
if (!audioQuality?.stereo && !audioQuality?.opusMaxAverageBitrate) {
return description;
}
const parsedSdp = transform.parse(description.sdp);
const mLines = parsedSdp.media;
for (const mLine of mLines) {
if (mLine.type === 'audio') {
const { payload } = mLine.rtp.find(protocol => protocol.codec === CodecMimeType.OPUS);
if (!payload) {
// eslint-disable-next-line no-continue
continue;
}
let fmtpOpus = mLine.fmtp.find(protocol => protocol.payload === payload);
if (!fmtpOpus) {
fmtpOpus = {
payload,
config: ''
};
}
const fmtpConfig = transform.parseParams(fmtpOpus.config);
let sdpChanged = false;
if (audioQuality?.stereo) {
fmtpConfig.stereo = 1;
sdpChanged = true;
}
if (audioQuality?.opusMaxAverageBitrate) {
fmtpConfig.maxaveragebitrate = audioQuality.opusMaxAverageBitrate;
sdpChanged = true;
}
if (!sdpChanged) {
// eslint-disable-next-line no-continue
continue;
}
let mungedConfig = '';
for (const key of Object.keys(fmtpConfig)) {
mungedConfig += `${key}=${fmtpConfig[key]}; `;
}
fmtpOpus.config = mungedConfig.trim();
}
}
return new RTCSessionDescription({
type: description.type,
sdp: transform.write(parsedSdp)
});
};
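/*
 * Munging sketch for an audio m-line when config.js contains
 * audioQuality: { stereo: true, opusMaxAverageBitrate: 510000 } (the opus
 * payload type 111 is just a typical value, not guaranteed):
 *
 *     Before: a=fmtp:111 minptime=10;useinbandfec=1
 *     After:  a=fmtp:111 minptime=10; useinbandfec=1; stereo=1; maxaveragebitrate=510000
 */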
/**
* Configures the stream encodings depending on the video type and the bitrates configured.
*
* @returns {Promise} promise that will be resolved when the operation is successful and rejected otherwise.
*/
TraceablePeerConnection.prototype.configureSenderVideoEncodings = function() {
return this.setSenderVideoConstraints(this._senderVideoMaxHeight);
};
TraceablePeerConnection.prototype.setLocalDescription = function(description) {
let localDescription = description;
this.trace('setLocalDescription::preTransform', dumpSDP(localDescription));
// Munge stereo flag and opusMaxAverageBitrate based on config.js
localDescription = this._mungeOpus(localDescription);
if (!this._usesUnifiedPlan) {
localDescription = this._adjustLocalMediaDirection(localDescription);
localDescription = this._ensureSimulcastGroupIsLast(localDescription);
}
// Munge the order of the codecs based on the preferences set through config.js if we are using SDP munging.
if (!this._usesTransceiverCodecPreferences) {
localDescription = this._mungeCodecOrder(localDescription);
}
this.trace('setLocalDescription::postTransform', dumpSDP(localDescription));
return new Promise((resolve, reject) => {
this.peerconnection.setLocalDescription(localDescription)
.then(() => {
this.trace('setLocalDescriptionOnSuccess');
const localUfrag = SDPUtil.getUfrag(localDescription.sdp);
if (localUfrag !== this.localUfrag) {
this.localUfrag = localUfrag;
this.eventEmitter.emit(RTCEvents.LOCAL_UFRAG_CHANGED, this, localUfrag);
}
resolve();
}, err => {
this.trace('setLocalDescriptionOnFailure', err);
this.eventEmitter.emit(RTCEvents.SET_LOCAL_DESCRIPTION_FAILED, err, this);
reject(err);
});
});
};
/**
* Enables/disables audio media transmission on this peer connection. When
* disabled the SDP audio media direction in the local SDP will be adjusted to
 * 'inactive' which means that no data will be sent or accepted, but
* the connection should be kept alive.
* @param {boolean} active <tt>true</tt> to enable audio media transmission or
* <tt>false</tt> to disable. If the value is not a boolean the call will have
* no effect.
* @return {boolean} <tt>true</tt> if the value has changed and sRD/sLD cycle
* needs to be executed in order for the changes to take effect or
* <tt>false</tt> if the given value was the same as the previous one.
* @public
*/
TraceablePeerConnection.prototype.setAudioTransferActive = function(active) {
logger.debug(`${this} audio transfer active: ${active}`);
const changed = this.audioTransferActive !== active;
this.audioTransferActive = active;
if (this._usesUnifiedPlan) {
this.tpcUtils.setAudioTransferActive(active);
// false means no renegotiation up the chain which is not needed in the Unified mode
return false;
}
return changed;
};
TraceablePeerConnection.prototype.setRemoteDescription = function(description) {
this.trace('setRemoteDescription::preTransform', dumpSDP(description));
/* eslint-disable no-param-reassign */
// Munge stereo flag and opusMaxAverageBitrate based on config.js
description = this._mungeOpus(description);
/* eslint-enable no-param-reassign */
if (!this._usesUnifiedPlan) {
// TODO the focus should squeeze or explode the remote simulcast
if (this.isSimulcastOn()) {
// eslint-disable-next-line no-param-reassign
description = this.simulcast.mungeRemoteDescription(description, true /* add x-google-conference flag */);
this.trace(
'setRemoteDescription::postTransform (simulcast)',
dumpSDP(description));
}
// eslint-disable-next-line no-param-reassign
description = normalizePlanB(description);
} else if (!this.isP2P) {
const currentDescription = this.peerconnection.remoteDescription;
// eslint-disable-next-line no-param-reassign
description = this.interop.toUnifiedPlan(description, currentDescription);
this.trace(
'setRemoteDescription::postTransform (Unified)',
dumpSDP(description));
if (this.isSimulcastOn()) {
// eslint-disable-next-line no-param-reassign
description = this.simulcast.mungeRemoteDescription(description);
// eslint-disable-next-line no-param-reassign
description = this.tpcUtils.insertUnifiedPlanSimulcastReceive(description);
this.trace(
'setRemoteDescription::postTransform (sim receive)',
dumpSDP(description));
}
}
// Munge the order of the codecs based on the preferences set through config.js.
// eslint-disable-next-line no-param-reassign
description = this._mungeCodecOrder(description);
if (this._usesUnifiedPlan) {
// eslint-disable-next-line no-param-reassign
description = this.tpcUtils.ensureCorrectOrderOfSsrcs(description);
}
return new Promise((resolve, reject) => {
this.peerconnection.setRemoteDescription(description)
.then(() => {
this.trace('setRemoteDescriptionOnSuccess');
const remoteUfrag = SDPUtil.getUfrag(description.sdp);
if (remoteUfrag !== this.remoteUfrag) {
this.remoteUfrag = remoteUfrag;
this.eventEmitter.emit(
RTCEvents.REMOTE_UFRAG_CHANGED, this, remoteUfrag);
}
resolve();
}, err => {
this.trace('setRemoteDescriptionOnFailure', err);
this.eventEmitter.emit(
RTCEvents.SET_REMOTE_DESCRIPTION_FAILED,
err,
this);
reject(err);
});
});
};
/**
* Changes the resolution of the video stream that is sent to the peer based on the resolution requested by the peer
* and user preference, sets the degradation preference on the sender based on the video type, configures the maximum
* bitrates on the send stream.
*
* @param {number} frameHeight - The max frame height to be imposed on the outgoing video stream.
* @returns {Promise} promise that will be resolved when the operation is successful and rejected otherwise.
*/
TraceablePeerConnection.prototype.setSenderVideoConstraints = function(frameHeight) {
if (frameHeight < 0) {
throw new Error(`Invalid frameHeight: ${frameHeight}`);
}
// XXX: This is not yet supported on mobile.
if (browser.isReactNative()) {
return Promise.resolve();
}
this._senderVideoMaxHeight = frameHeight;
const localVideoTrack = this.getLocalVideoTrack();
if (!localVideoTrack || localVideoTrack.isMuted()) {
return Promise.resolve();
}
const videoSender = this.findSenderByKind(MediaType.VIDEO);
if (!videoSender) {
return Promise.resolve();
}
const parameters = videoSender.getParameters();
if (!parameters?.encodings?.length) {
return Promise.resolve();
}
// Set the degradation preference.
const preference = this.isSharingLowFpsScreen()
? DEGRADATION_PREFERENCE_DESKTOP // Prefer resolution for low fps share.
: DEGRADATION_PREFERENCE_CAMERA; // Prefer frame-rate for high fps share and camera.
parameters.degradationPreference = preference;
logger.info(`${this} Setting degradation preference [preference=${preference},track=${localVideoTrack}]`);
// Calculate the encodings active state based on the resolution requested by the bridge.
this.encodingsEnabledState = this.tpcUtils.calculateEncodingsActiveState(localVideoTrack, frameHeight);
const maxBitrates = this.tpcUtils.calculateEncodingsBitrates(localVideoTrack);
const videoType = localVideoTrack.getVideoType();
if (this.isSimulcastOn()) {
for (const encoding in parameters.encodings) {
if (parameters.encodings.hasOwnProperty(encoding)) {
parameters.encodings[encoding].active = this.encodingsEnabledState[encoding];
// Firefox doesn't follow the spec and lets application specify the degradation preference on the
// encodings.
browser.isFirefox() && (parameters.encodings[encoding].degradationPreference = preference);
// Max bitrates are configured on the encodings only for VP8.
if (this.getConfiguredVideoCodec() === CodecMimeType.VP8
&& (this.options?.videoQuality?.maxBitratesVideo
|| this.isSharingLowFpsScreen()
|| this._usesUnifiedPlan)) {
parameters.encodings[encoding].maxBitrate = maxBitrates[encoding];
}
}
}
this.tpcUtils.updateEncodingsResolution(parameters);
// For p2p cases and cases where simulcast is explicitly disabled.
} else if (frameHeight > 0) {
let scaleFactor = HD_SCALE_FACTOR;
// Do not scale down encodings for desktop tracks for non-simulcast case.
if (videoType === VideoType.CAMERA && localVideoTrack.resolution > frameHeight) {
scaleFactor = Math.floor(localVideoTrack.resolution / frameHeight);
}
parameters.encodings[0].active = true;
parameters.encodings[0].scaleResolutionDownBy = scaleFactor;
// Firefox doesn't follow the spec and lets application specify the degradation preference on the encodings.
browser.isFirefox() && (parameters.encodings[0].degradationPreference = preference);
// Configure the bitrate.
if (this.getConfiguredVideoCodec() === CodecMimeType.VP8 && this.options?.videoQuality?.maxBitratesVideo) {
let bitrate = this.getTargetVideoBitrates()?.high;
if (videoType === VideoType.CAMERA) {
bitrate = this.tpcUtils.localStreamEncodingsConfig
.find(layer => layer.scaleResolutionDownBy === scaleFactor)?.maxBitrate ?? bitrate;
}
parameters.encodings[0].maxBitrate = bitrate;
}
} else {
parameters.encodings[0].active = false;
}
logger.info(`${this} setting max height=${frameHeight},encodings=${JSON.stringify(parameters.encodings)}`);
return videoSender.setParameters(parameters).then(() => {
localVideoTrack.maxEnabledResolution = frameHeight;
this.eventEmitter.emit(RTCEvents.LOCAL_TRACK_MAX_ENABLED_RESOLUTION_CHANGED, localVideoTrack);
});
};
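/*
 * Typical call sketch: the receive-side constraints indicate that only a
 * thumbnail-sized stream is needed, so the send resolution is lowered.
 *
 *     tpc.setSenderVideoConstraints(180)
 *         .then(() => {
 *             // With simulcast on, higher encodings are deactivated; in the
 *             // non-simulcast case the single encoding is scaled down.
 *         });
 */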
/**
* Enables/disables video media transmission on this peer connection. When
* disabled the SDP video media direction in the local SDP will be adjusted to
 * 'inactive' which means that no data will be sent or accepted, but
* the connection should be kept alive.
* @param {boolean} active <tt>true</tt> to enable video media transmission or
* <tt>false</tt> to disable. If the value is not a boolean the call will have
* no effect.
* @return {boolean} <tt>true</tt> if the value has changed and sRD/sLD cycle
* needs to be executed in order for the changes to take effect or
* <tt>false</tt> if the given value was the same as the previous one.
* @public
*/
TraceablePeerConnection.prototype.setVideoTransferActive = function(active) {
logger.debug(`${this} video transfer active: ${active}`);
const changed = this.videoTransferActive !== active;
this.videoTransferActive = active;
if (this._usesUnifiedPlan) {
this.tpcUtils.setVideoTransferActive(active);
// false means no renegotiation up the chain which is not needed in the Unified mode
return false;
}
return changed;
};
/**
* Sends DTMF tones if possible.
*
* @param {string} tones - The DTMF tones string as defined by {@code RTCDTMFSender.insertDTMF}, 'tones' argument.
* @param {number} duration - The amount of time in milliseconds that each DTMF should last. It's 200ms by default.
 * @param {number} interToneGap - The length of time in milliseconds to wait between tones. It's 200ms by default.
*
* @returns {void}
*/
TraceablePeerConnection.prototype.sendTones = function(tones, duration = 200, interToneGap = 200) {
if (!this._dtmfSender) {
if (this.peerconnection.getSenders) {
const rtpSender = this.peerconnection.getSenders().find(s => s.dtmf);
this._dtmfSender = rtpSender && rtpSender.dtmf;
this._dtmfSender && logger.info(`${this} initialized DTMFSender using getSenders`);
}
if (!this._dtmfSender) {
const localAudioTrack = Array.from(this.localTracks.values()).find(t => t.isAudioTrack());
if (this.peerconnection.createDTMFSender && localAudioTrack) {
this._dtmfSender = this.peerconnection.createDTMFSender(localAudioTrack.getTrack());
}
this._dtmfSender && logger.info(`${this} initialized DTMFSender using deprecated createDTMFSender`);
}
if (this._dtmfSender) {
this._dtmfSender.ontonechange = this._onToneChange.bind(this);
}
}
if (this._dtmfSender) {
if (this._dtmfSender.toneBuffer) {
this._dtmfTonesQueue.push({
tones,
duration,
interToneGap
});
return;
}
this._dtmfSender.insertDTMF(tones, duration, interToneGap);
} else {
logger.warn(`${this} sendTones - failed to select DTMFSender`);
}
};
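/*
 * DTMF sketch, assuming an audio sender is already present on the connection:
 *
 *     tpc.sendTones('1234#');        // 200ms tones with 200ms gaps by default
 *     tpc.sendTones('*0', 400, 100); // queued if tones are still playing
 */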
/**
 * Callback invoked by {@code this._dtmfSender} when it has finished playing
* a single tone.
*
* @param {Object} event - The tonechange event which indicates what characters
* are left to be played for the current tone.
* @private
* @returns {void}
*/
TraceablePeerConnection.prototype._onToneChange = function(event) {
// An empty event.tone indicates the current tones have finished playing.
// Automatically start playing any queued tones on finish.
if (this._dtmfSender && event.tone === '' && this._dtmfTonesQueue.length) {
const { tones, duration, interToneGap } = this._dtmfTonesQueue.shift();
this._dtmfSender.insertDTMF(tones, duration, interToneGap);
}
};
/**
* Makes the underlying TraceablePeerConnection generate new SSRC for
* the recvonly video stream.
*/
TraceablePeerConnection.prototype.generateRecvonlySsrc = function() {
const newSSRC = SDPUtil.generateSsrc();
logger.info(`${this} generated new recvonly SSRC=${newSSRC}`);
this.sdpConsistency.setPrimarySsrc(newSSRC);
};
/**
* Makes the underlying TraceablePeerConnection forget the current primary video
* SSRC.
*/
TraceablePeerConnection.prototype.clearRecvonlySsrc = function() {
logger.info(`${this} Clearing primary video SSRC!`);
this.sdpConsistency.clearVideoSsrcCache();
};
/**
* Closes underlying WebRTC PeerConnection instance and removes all remote
* tracks by emitting {@link RTCEvents.REMOTE_TRACK_REMOVED} for each one of
* them.
*/
TraceablePeerConnection.prototype.close = function() {
this.trace('stop');
// Off SignalingEvents
this.signalingLayer.off(SignalingEvents.PEER_MUTED_CHANGED, this._peerMutedChanged);
this.signalingLayer.off(SignalingEvents.PEER_VIDEO_TYPE_CHANGED, this._peerVideoTypeChanged);
this._usesUnifiedPlan && this.peerconnection.removeEventListener('track', this.onTrack);
for (const peerTracks of this.remoteTracks.values()) {
for (const remoteTrack of peerTracks.values()) {
this._removeRemoteTrack(remoteTrack);
}
}
this.remoteTracks.clear();
this._addedStreams = [];
this._dtmfSender = null;
this._dtmfTonesQueue = [];
if (!this.rtc._removePeerConnection(this)) {
logger.error(`${this} RTC._removePeerConnection returned false`);
}
if (this.statsinterval !== null) {
window.clearInterval(this.statsinterval);
this.statsinterval = null;
}
logger.info(`${this} Closing peerconnection`);
this.peerconnection.close();
};
TraceablePeerConnection.prototype.createAnswer = function(constraints) {
return this._createOfferOrAnswer(false /* answer */, constraints);
};
TraceablePeerConnection.prototype.createOffer = function(constraints) {
return this._createOfferOrAnswer(true /* offer */, constraints);
};
TraceablePeerConnection.prototype._createOfferOrAnswer = function(
isOffer,
constraints) {
const logName = isOffer ? 'Offer' : 'Answer';
this.trace(`create${logName}`, JSON.stringify(constraints, null, ' '));
const handleSuccess = (resultSdp, resolveFn, rejectFn) => {
try {
this.trace(
`create${logName}OnSuccess::preTransform`, dumpSDP(resultSdp));
if (!this._usesUnifiedPlan) {
// If there are no local video tracks, then a "recvonly"
// SSRC needs to be generated
if (!this.hasAnyTracksOfType(MediaType.VIDEO)
&& !this.sdpConsistency.hasPrimarySsrcCached()) {
this.generateRecvonlySsrc();
}
// eslint-disable-next-line no-param-reassign
resultSdp = new RTCSessionDescription({
type: resultSdp.type,
sdp: this.sdpConsistency.makeVideoPrimarySsrcsConsistent(
resultSdp.sdp)
});
this.trace(
`create${logName}OnSuccess::postTransform `
+ '(make primary audio/video ssrcs consistent)',
dumpSDP(resultSdp));
}
const localVideoTrack = this.getLocalVideoTrack();
// Configure simulcast for camera tracks and for desktop tracks that need simulcast.
if (this.isSimulcastOn() && browser.usesSdpMungingForSimulcast()
&& (localVideoTrack?.getVideoType() === VideoType.CAMERA
|| this._usesUnifiedPlan
|| !this.isSharingLowFpsScreen())) {
// eslint-disable-next-line no-param-reassign
resultSdp = this.simulcast.mungeLocalDescription(resultSdp);
this.trace(
`create${logName}`
+ 'OnSuccess::postTransform (simulcast)',
dumpSDP(resultSdp));
}
if (!this.options.disableRtx && browser.usesSdpMungingForSimulcast()) {
// eslint-disable-next-line no-param-reassign
resultSdp = new RTCSessionDescription({
type: resultSdp.type,
sdp: this.rtxModifier.modifyRtxSsrcs(resultSdp.sdp)
});
this.trace(
`create${logName}`
+ 'OnSuccess::postTransform (rtx modifier)',
dumpSDP(resultSdp));
}
const ssrcMap = this._extractSSRCMap(resultSdp);
this._processLocalSSRCsMap(ssrcMap);
resolveFn(resultSdp);
} catch (e) {
this.trace(`create${logName}OnError`, e);
this.trace(`create${logName}OnError`, dumpSDP(resultSdp));
logger.error(`${this} create${logName}OnError`, e, dumpSDP(resultSdp));
rejectFn(e);
}
};
const handleFailure = (err, rejectFn) => {
this.trace(`create${logName}OnFailure`, err);
const eventType
= isOffer
? RTCEvents.CREATE_OFFER_FAILED
: RTCEvents.CREATE_ANSWER_FAILED;
this.eventEmitter.emit(eventType, err, this);
rejectFn(err);
};
// Set the codec preference before creating an offer or answer so that the generated SDP will have
// the correct preference order.
if (this._usesTransceiverCodecPreferences) {
const transceiver = this.peerconnection.getTransceivers()
.find(t => t.receiver && t.receiver?.track?.kind === MediaType.VIDEO);
if (transceiver) {
let capabilities = RTCRtpReceiver.getCapabilities(MediaType.VIDEO)?.codecs;
const mimeType = this.codecPreference?.mimeType;
const enable = this.codecPreference?.enable;
if (capabilities && mimeType && enable) {
// Move the desired codec (all variations of it as well) to the beginning of the list.
/* eslint-disable-next-line arrow-body-style */
capabilities.sort(caps => {
return caps.mimeType.toLowerCase() === `${MediaType.VIDEO}/${mimeType}` ? -1 : 1;
});
} else if (capabilities && mimeType) {
capabilities = capabilities
.filter(caps => caps.mimeType.toLowerCase() !== `${MediaType.VIDEO}/${mimeType}`);
}
// Disable ulpfec on Google Chrome and derivatives because
// https://bugs.chromium.org/p/chromium/issues/detail?id=1276427
if (browser.isChromiumBased()) {
capabilities = capabilities
.filter(caps => caps.mimeType.toLowerCase() !== `${MediaType.VIDEO}/${CodecMimeType.ULPFEC}`);
}
try {
transceiver.setCodecPreferences(capabilities);
} catch (err) {
logger.warn(`${this} Setting codec[preference=${mimeType},enable=${enable}] failed`, err);
}
}
}
return new Promise((resolve, reject) => {
let oaPromise;
if (isOffer) {
oaPromise = this.peerconnection.createOffer(constraints);
} else {
oaPromise = this.peerconnection.createAnswer(constraints);
}
oaPromise
.then(
sdp => handleSuccess(sdp, resolve, reject),
error => handleFailure(error, reject));
});
};
/**
* Extract primary SSRC from given {@link TrackSSRCInfo} object.
* @param {TrackSSRCInfo} ssrcObj
* @return {number|null} the primary SSRC or <tt>null</tt>
*/
TraceablePeerConnection.prototype._extractPrimarySSRC = function(ssrcObj) {
if (ssrcObj && ssrcObj.groups && ssrcObj.groups.length) {
return ssrcObj.groups[0].ssrcs[0];
} else if (ssrcObj && ssrcObj.ssrcs && ssrcObj.ssrcs.length) {
return ssrcObj.ssrcs[0];
}
return null;
};
/**
* Goes over the SSRC map extracted from the latest local description and tries
* to match them with the local tracks (by MSID). Will update the values
* currently stored in the {@link TraceablePeerConnection.localSSRCs} map.
* @param {Map<string,TrackSSRCInfo>} ssrcMap
* @private
*/
TraceablePeerConnection.prototype._processLocalSSRCsMap = function(ssrcMap) {
for (const track of this.localTracks.values()) {
const sourceIdentifier = this._usesUnifiedPlan ? track.getType() : track.storedMSID;
if (ssrcMap.has(sourceIdentifier)) {
const newSSRC = ssrcMap.get(sourceIdentifier);
if (!newSSRC) {
logger.error(`${this} No SSRC found for stream=${sourceIdentifier}`);
return;
}
const oldSSRC = this.localSSRCs.get(track.rtcId);
const newSSRCNum = this._extractPrimarySSRC(newSSRC);
const oldSSRCNum = this._extractPrimarySSRC(oldSSRC);
// eslint-disable-next-line no-negated-condition
if (newSSRCNum !== oldSSRCNum) {
oldSSRCNum && logger.error(`${this} Overwriting SSRC for track=${track} with ssrc=${newSSRC}`);
this.localSSRCs.set(track.rtcId, newSSRC);
this.eventEmitter.emit(RTCEvents.LOCAL_TRACK_SSRC_UPDATED, track, newSSRCNum);
}
} else if (!track.isVideoTrack() && !track.isMuted()) {
// It is normal to find no SSRCs for a muted video track in
// the local SDP as the recv-only SSRC is no longer munged in.
// So log the warning only if it's not a muted video track.
logger.warn(`${this} No SSRCs found in the local SDP for track=${track}, stream=${sourceIdentifier}`);
}
}
};
TraceablePeerConnection.prototype.addIceCandidate = function(candidate) {
this.trace('addIceCandidate', JSON.stringify({
candidate: candidate.candidate,
sdpMid: candidate.sdpMid,
sdpMLineIndex: candidate.sdpMLineIndex,
usernameFragment: candidate.usernameFragment
}, null, ' '));
return this.peerconnection.addIceCandidate(candidate);
};
/**
* Returns the number of simulcast streams that are currently enabled on the peerconnection.
*
* @returns {number} The number of simulcast streams currently enabled or 1 when simulcast is disabled.
*/
TraceablePeerConnection.prototype.getActiveSimulcastStreams = function() {
let activeStreams = 1;
if (this.isSimulcastOn() && this.encodingsEnabledState) {
activeStreams = this.encodingsEnabledState.filter(stream => Boolean(stream))?.length;
} else if (this.isSimulcastOn()) {
activeStreams = SIM_LAYER_RIDS.length;
}
return activeStreams;
};
/**
* Obtains call-related stats from the peer connection.
*
* @returns {Promise<Object>} Promise which resolves with data providing statistics about
* the peerconnection.
*/
TraceablePeerConnection.prototype.getStats = function() {
return this.peerconnection.getStats();
};
/**
* Generates and stores new SSRC info object for given local track.
* The method should be called only for a video track being added to this TPC
* in the muted state (given that the current browser uses this strategy).
* @param {JitsiLocalTrack} track
* @return {TPCSSRCInfo}
*/
TraceablePeerConnection.prototype.generateNewStreamSSRCInfo = function(track) {
const rtcId = track.rtcId;
let ssrcInfo = this._getSSRC(rtcId);
if (ssrcInfo) {
logger.error(`${this} Overwriting local SSRCs for track id=${rtcId}`);
}
// Configure simulcast for camera tracks and desktop tracks that need simulcast.
if (this.isSimulcastOn()
&& (track.getVideoType() === VideoType.CAMERA || !this.isSharingLowFpsScreen())) {
ssrcInfo = {
ssrcs: [],
groups: []
};
for (let i = 0; i < SIM_LAYER_RIDS.length; i++) {
ssrcInfo.ssrcs.push(SDPUtil.generateSsrc());
}
ssrcInfo.groups.push({
ssrcs: ssrcInfo.ssrcs.slice(),
semantics: 'SIM'
});
} else {
ssrcInfo = {
ssrcs: [ SDPUtil.generateSsrc() ],
groups: []
};
}
if (!this.options.disableRtx) {
// Specifically use a for loop here because we'll
// be adding to the list we're iterating over, so we
// only want to iterate through the items originally
// on the list
const currNumSsrcs = ssrcInfo.ssrcs.length;
for (let i = 0; i < currNumSsrcs; ++i) {
const primarySsrc = ssrcInfo.ssrcs[i];
const rtxSsrc = SDPUtil.generateSsrc();
ssrcInfo.ssrcs.push(rtxSsrc);
ssrcInfo.groups.push({
ssrcs: [ primarySsrc, rtxSsrc ],
semantics: 'FID'
});
}
}
ssrcInfo.msid = track.storedMSID;
this.localSSRCs.set(rtcId, ssrcInfo);
return ssrcInfo;
};
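/*
 * Shape sketch of the generated TPCSSRCInfo when simulcast and RTX are both
 * enabled (s1..s3 are the simulcast SSRCs, r1..r3 their RTX pairs):
 *
 *     {
 *         ssrcs: [ s1, s2, s3, r1, r2, r3 ],
 *         groups: [
 *             { semantics: 'SIM', ssrcs: [ s1, s2, s3 ] },
 *             { semantics: 'FID', ssrcs: [ s1, r1 ] },
 *             { semantics: 'FID', ssrcs: [ s2, r2 ] },
 *             { semantics: 'FID', ssrcs: [ s3, r3 ] }
 *         ],
 *         msid: track.storedMSID
 *     }
 */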
/**
* Returns if the peer connection uses Unified plan implementation.
*
* @returns {boolean} True if the pc uses Unified plan, false otherwise.
*/
TraceablePeerConnection.prototype.usesUnifiedPlan = function() {
return this._usesUnifiedPlan;
};
/**
* Creates a text representation of this <tt>TraceablePeerConnection</tt>
* instance.
* @return {string}
*/
TraceablePeerConnection.prototype.toString = function() {
return `TPC[id=${this.id},type=${this.isP2P ? 'P2P' : 'JVB'}]`;
};
| modules/RTC/TraceablePeerConnection.js | import { getLogger } from '@jitsi/logger';
import { Interop } from '@jitsi/sdp-interop';
import transform from 'sdp-transform';
import * as CodecMimeType from '../../service/RTC/CodecMimeType';
import MediaDirection from '../../service/RTC/MediaDirection';
import * as MediaType from '../../service/RTC/MediaType';
import RTCEvents from '../../service/RTC/RTCEvents';
import * as SignalingEvents from '../../service/RTC/SignalingEvents';
import { getSourceNameForJitsiTrack } from '../../service/RTC/SignalingLayer';
import * as VideoType from '../../service/RTC/VideoType';
import { SS_DEFAULT_FRAME_RATE } from '../RTC/ScreenObtainer';
import browser from '../browser';
import FeatureFlags from '../flags/FeatureFlags';
import LocalSdpMunger from '../sdp/LocalSdpMunger';
import RtxModifier from '../sdp/RtxModifier';
import SDP from '../sdp/SDP';
import SDPUtil from '../sdp/SDPUtil';
import SdpConsistency from '../sdp/SdpConsistency';
import { SdpTransformWrap } from '../sdp/SdpTransformUtil';
import * as GlobalOnErrorHandler from '../util/GlobalOnErrorHandler';
import JitsiRemoteTrack from './JitsiRemoteTrack';
import RTC from './RTC';
import RTCUtils from './RTCUtils';
import {
HD_BITRATE,
HD_SCALE_FACTOR,
SIM_LAYER_RIDS,
TPCUtils
} from './TPCUtils';
// FIXME SDP tools should end up in some kind of util module
const logger = getLogger(__filename);
const DEGRADATION_PREFERENCE_CAMERA = 'maintain-framerate';
const DEGRADATION_PREFERENCE_DESKTOP = 'maintain-resolution';
/* eslint-disable max-params */
/**
* Creates new instance of 'TraceablePeerConnection'.
*
* @param {RTC} rtc the instance of <tt>RTC</tt> service
* @param {number} id the peer connection id assigned by the parent RTC module.
* @param {SignalingLayer} signalingLayer the signaling layer instance
* @param {object} pcConfig The {@code RTCConfiguration} to use for the WebRTC peer connection.
* @param {object} constraints WebRTC 'PeerConnection' constraints
* @param {boolean} isP2P indicates whether or not the new instance will be used in a peer to peer connection.
 * @param {object} options <tt>TraceablePeerConnection</tt> config options.
* @param {boolean} options.disableSimulcast if set to 'true' will disable the simulcast.
* @param {boolean} options.disableRtx if set to 'true' will disable the RTX.
 * @param {string} options.disabledCodec the mime type of the codec that should not be negotiated on the peerconnection.
* @param {string} options.preferredCodec the mime type of the codec that needs to be made the preferred codec for the
* peerconnection.
* @param {boolean} options.startSilent If set to 'true' no audio will be sent or received.
* @param {boolean} options.usesUnifiedPlan Indicates if the browser is running in unified plan mode.
*
* FIXME: initially the purpose of TraceablePeerConnection was to be able to
* debug the peer connection. Since many other responsibilities have been added
* it would make sense to extract a separate class from it and come up with
* a more suitable name.
*
* @constructor
*/
export default function TraceablePeerConnection(
rtc,
id,
signalingLayer,
pcConfig,
constraints,
isP2P,
options) {
/**
* Indicates whether or not this peer connection instance is actively
* sending/receiving audio media. When set to <tt>false</tt> the SDP audio
* media direction will be adjusted to 'inactive' in order to suspend
* the transmission.
* @type {boolean}
* @private
*/
this.audioTransferActive = !(options.startSilent === true);
/**
* The DTMF sender instance used to send DTMF tones.
*
* @type {RTCDTMFSender|undefined}
* @private
*/
this._dtmfSender = undefined;
/**
* @typedef {Object} TouchToneRequest
* @property {string} tones - The DTMF tones string as defined by
* {@code RTCDTMFSender.insertDTMF}, 'tones' argument.
* @property {number} duration - The amount of time in milliseconds that
* each DTMF should last.
 * @property {number} interToneGap - The length of time in milliseconds to
* wait between tones.
*/
/**
* TouchToneRequests which are waiting to be played. This queue is filled
* if there are touch tones currently being played.
*
* @type {Array<TouchToneRequest>}
* @private
*/
this._dtmfTonesQueue = [];
/**
* Indicates whether or not this peer connection instance is actively
* sending/receiving video media. When set to <tt>false</tt> the SDP video
* media direction will be adjusted to 'inactive' in order to suspend
* the transmission.
* @type {boolean}
* @private
*/
this.videoTransferActive = true;
/**
* The parent instance of RTC service which created this
 * <tt>TraceablePeerConnection</tt>.
* @type {RTC}
*/
this.rtc = rtc;
/**
* The peer connection identifier assigned by the RTC module.
* @type {number}
*/
this.id = id;
/**
* Indicates whether or not this instance is used in a peer to peer
* connection.
* @type {boolean}
*/
this.isP2P = isP2P;
// FIXME: We should support multiple streams per jid.
/**
* The map holds remote tracks associated with this peer connection.
* It maps user's JID to media type and remote track
* (one track per media type per user's JID).
* @type {Map<string, Map<MediaType, JitsiRemoteTrack>>}
*/
this.remoteTracks = new Map();
/**
* A map which stores local tracks mapped by {@link JitsiLocalTrack.rtcId}
* @type {Map<number, JitsiLocalTrack>}
*/
this.localTracks = new Map();
/**
* Keeps tracks of the WebRTC <tt>MediaStream</tt>s that have been added to
* the underlying WebRTC PeerConnection.
* @type {Array}
* @private
*/
this._addedStreams = [];
/**
* @typedef {Object} TPCGroupInfo
* @property {string} semantics the SSRC groups semantics
* @property {Array<number>} ssrcs group's SSRCs in order where the first
* one is group's primary SSRC, the second one is secondary (RTX) and so
* on...
*/
/**
* @typedef {Object} TPCSSRCInfo
* @property {Array<number>} ssrcs an array which holds all track's SSRCs
* @property {Array<TPCGroupInfo>} groups an array stores all track's SSRC
* groups
*/
/**
* Holds the info about local track's SSRCs mapped per their
* {@link JitsiLocalTrack.rtcId}
* @type {Map<number, TPCSSRCInfo>}
*/
this.localSSRCs = new Map();
/**
* The local ICE username fragment for this session.
*/
this.localUfrag = null;
/**
* The remote ICE username fragment for this session.
*/
this.remoteUfrag = null;
/**
* The signaling layer which operates this peer connection.
* @type {SignalingLayer}
*/
this.signalingLayer = signalingLayer;
// SignalingLayer listeners
this._peerVideoTypeChanged = this._peerVideoTypeChanged.bind(this);
this.signalingLayer.on(
SignalingEvents.PEER_VIDEO_TYPE_CHANGED,
this._peerVideoTypeChanged);
this._peerMutedChanged = this._peerMutedChanged.bind(this);
this.signalingLayer.on(
SignalingEvents.PEER_MUTED_CHANGED,
this._peerMutedChanged);
this.options = options;
// Make sure constraints is properly formatted in order to provide information about whether or not this
// connection is P2P to rtcstats.
const safeConstraints = constraints || {};
safeConstraints.optional = safeConstraints.optional || [];
// The `optional` parameter needs to be of type array, otherwise chrome will throw an error.
// Firefox and Safari just ignore it.
if (Array.isArray(safeConstraints.optional)) {
safeConstraints.optional.push({ rtcStatsSFUP2P: this.isP2P });
} else {
logger.warn('Optional param is not an array, rtcstats p2p data is omitted.');
}
this.peerconnection = new RTCUtils.RTCPeerConnectionType(pcConfig, safeConstraints);
this.tpcUtils = new TPCUtils(this);
this.updateLog = [];
this.stats = {};
this.statsinterval = null;
/**
* Flag used to indicate if simulcast is turned off and a cap of 500 Kbps is applied on screensharing.
*/
this._capScreenshareBitrate = this.options.capScreenshareBitrate;
/**
* Flag used to indicate if the browser is running in unified plan mode.
*/
this._usesUnifiedPlan = options.usesUnifiedPlan;
/**
* Flag used to indicate if RTCRtpTransceiver#setCodecPreferences is to be used instead of SDP
* munging for codec selection.
*/
this._usesTransceiverCodecPreferences = browser.supportsCodecPreferences() && this._usesUnifiedPlan;
this._usesTransceiverCodecPreferences
&& logger.info('Using RTCRtpTransceiver#setCodecPreferences for codec selection');
/**
* @type {number} The max number of stats to keep in this.stats. Limit to
* 300 values, i.e. 5 minutes; set to 0 to disable
*/
this.maxstats = options.maxstats;
this.interop = new Interop();
const Simulcast = require('@jitsi/sdp-simulcast');
this.simulcast = new Simulcast(
{
numOfLayers: SIM_LAYER_RIDS.length,
explodeRemoteSimulcast: false,
usesUnifiedPlan: this._usesUnifiedPlan
});
this.sdpConsistency = new SdpConsistency(this.toString());
/**
* Munges local SDP provided to the Jingle Session in order to prevent from
* sending SSRC updates on attach/detach and mute/unmute (for video).
* @type {LocalSdpMunger}
*/
this.localSdpMunger = new LocalSdpMunger(this, this.rtc.getLocalEndpointId());
/**
 * TraceablePeerConnection uses RTC's eventEmitter
* @type {EventEmitter}
*/
this.eventEmitter = rtc.eventEmitter;
this.rtxModifier = new RtxModifier();
/**
* The height constraint applied on the video sender. The default value is 2160 (4K) when layer suspension is
* explicitly disabled.
*/
this._senderVideoMaxHeight = 2160;
// override as desired
this.trace = (what, info) => {
logger.debug(what, info);
this.updateLog.push({
time: new Date(),
type: what,
value: info || ''
});
};
this.onicecandidate = null;
this.peerconnection.onicecandidate = event => {
this.trace(
'onicecandidate',
JSON.stringify(event.candidate, null, ' '));
if (this.onicecandidate !== null) {
this.onicecandidate(event);
}
};
// Use track events when browser is running in unified plan mode and stream events in plan-b mode.
if (this._usesUnifiedPlan) {
this.onTrack = evt => {
const stream = evt.streams[0];
this._remoteTrackAdded(stream, evt.track, evt.transceiver);
stream.addEventListener('removetrack', e => {
this._remoteTrackRemoved(stream, e.track);
});
};
this.peerconnection.addEventListener('track', this.onTrack);
} else {
this.peerconnection.onaddstream = event => this._remoteStreamAdded(event.stream);
this.peerconnection.onremovestream = event => this._remoteStreamRemoved(event.stream);
}
this.onsignalingstatechange = null;
this.peerconnection.onsignalingstatechange = event => {
this.trace('onsignalingstatechange', this.signalingState);
if (this.onsignalingstatechange !== null) {
this.onsignalingstatechange(event);
}
};
this.oniceconnectionstatechange = null;
this.peerconnection.oniceconnectionstatechange = event => {
this.trace('oniceconnectionstatechange', this.iceConnectionState);
if (this.oniceconnectionstatechange !== null) {
this.oniceconnectionstatechange(event);
}
};
this.onnegotiationneeded = null;
this.peerconnection.onnegotiationneeded = event => {
this.trace('onnegotiationneeded');
if (this.onnegotiationneeded !== null) {
this.onnegotiationneeded(event);
}
};
this.onconnectionstatechange = null;
this.peerconnection.onconnectionstatechange = event => {
this.trace('onconnectionstatechange', this.connectionState);
if (this.onconnectionstatechange !== null) {
this.onconnectionstatechange(event);
}
};
this.ondatachannel = null;
this.peerconnection.ondatachannel = event => {
this.trace('ondatachannel');
if (this.ondatachannel !== null) {
this.ondatachannel(event);
}
};
if (this.maxstats) {
this.statsinterval = window.setInterval(() => {
this.getStats().then(stats => {
if (typeof stats?.result === 'function') {
const results = stats.result();
for (let i = 0; i < results.length; ++i) {
const res = results[i];
res.names().forEach(name => {
this._processStat(res, name, res.stat(name));
});
}
} else {
stats.forEach(r => this._processStat(r, '', r));
}
});
}, 1000);
}
logger.info(`Create new ${this}`);
}
/* eslint-enable max-params */
/**
* Process stat and adds it to the array of stats we store.
* @param report the current stats report.
* @param name the name of the report, if available
* @param statValue the value to add.
* @private
*/
TraceablePeerConnection.prototype._processStat
= function(report, name, statValue) {
const id = `${report.id}-${name}`;
let s = this.stats[id];
const now = new Date();
if (!s) {
this.stats[id] = s = {
startTime: now,
endTime: now,
values: [],
times: []
};
}
s.values.push(statValue);
s.times.push(now.getTime());
if (s.values.length > this.maxstats) {
s.values.shift();
s.times.shift();
}
s.endTime = now;
};
/**
* Returns a string representation of a SessionDescription object.
*/
const dumpSDP = function(description) {
if (typeof description === 'undefined' || description === null) {
return '';
}
return `type: ${description.type}\r\n${description.sdp}`;
};
/**
* Forwards the {@link peerconnection.iceConnectionState} state except that it
* will convert "completed" into "connected" where both mean that the ICE has
* succeeded and is up and running. We never see "completed" state for
* the JVB connection, but it started appearing for the P2P one. This method
 * allows adapting old logic to this new situation.
* @return {string}
*/
TraceablePeerConnection.prototype.getConnectionState = function() {
const state = this.peerconnection.iceConnectionState;
if (state === 'completed') {
return 'connected';
}
return state;
};
/**
* Obtains the media direction for given {@link MediaType}. The method takes
* into account whether or not there are any local tracks for media and
* the {@link audioTransferActive} and {@link videoTransferActive} flags.
* @param {MediaType} mediaType
* @param {boolean} isAddOperation whether the direction is to be calculated after a source-add action.
* @return {string} one of the SDP direction constants ('sendrecv, 'recvonly'
* etc.) which should be used when setting local description on the peer
* connection.
* @private
*/
TraceablePeerConnection.prototype.getDesiredMediaDirection = function(mediaType, isAddOperation = false) {
const hasLocalSource = this.hasAnyTracksOfType(mediaType);
if (this._usesUnifiedPlan) {
return isAddOperation
? hasLocalSource ? MediaDirection.SENDRECV : MediaDirection.SENDONLY
: hasLocalSource ? MediaDirection.RECVONLY : MediaDirection.INACTIVE;
}
const mediaTransferActive = mediaType === MediaType.AUDIO ? this.audioTransferActive : this.videoTransferActive;
if (mediaTransferActive) {
return hasLocalSource ? MediaDirection.SENDRECV : MediaDirection.RECVONLY;
}
return MediaDirection.INACTIVE;
};
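/*
 * Unified-plan outcome sketch for this helper (in plan B the result also
 * depends on the audio/video transfer active flags):
 *
 *     isAddOperation | local source | direction
 *     ---------------+--------------+-----------
 *     true           | yes          | sendrecv
 *     true           | no           | sendonly
 *     false          | yes          | recvonly
 *     false          | no           | inactive
 */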
/**
* Returns the list of RTCRtpReceivers created for the source of the given media type associated with
* the set of remote endpoints specified.
* @param {Array<string>} endpoints list of the endpoints
* @param {string} mediaType 'audio' or 'video'
* @returns {Array<RTCRtpReceiver>} list of receivers created by the peerconnection.
*/
TraceablePeerConnection.prototype._getReceiversByEndpointIds = function(endpoints, mediaType) {
let remoteTracks = [];
let receivers = [];
for (const endpoint of endpoints) {
remoteTracks = remoteTracks.concat(this.getRemoteTracks(endpoint, mediaType));
}
// Get the ids of the MediaStreamTracks associated with each of these remote tracks.
const remoteTrackIds = remoteTracks.map(remote => remote.track?.id);
receivers = this.peerconnection.getReceivers()
.filter(receiver => receiver.track
&& receiver.track.kind === mediaType
&& remoteTrackIds.find(trackId => trackId === receiver.track.id));
return receivers;
};
/**
* Tells whether or not this TPC instance is using Simulcast.
* @return {boolean} <tt>true</tt> if simulcast is enabled and active or
* <tt>false</tt> if it's turned off.
*/
TraceablePeerConnection.prototype.isSimulcastOn = function() {
return !this.options.disableSimulcast;
};
/**
* Handles {@link SignalingEvents.PEER_VIDEO_TYPE_CHANGED}
* @param {string} endpointId the video owner's ID (MUC nickname)
* @param {VideoType} videoType the new value
* @private
*/
TraceablePeerConnection.prototype._peerVideoTypeChanged = function(
endpointId,
videoType) {
// Check if endpointId has a value to avoid action on random track
if (!endpointId) {
logger.error(`${this} No endpointID on peerVideoTypeChanged`);
return;
}
const videoTrack = this.getRemoteTracks(endpointId, MediaType.VIDEO);
if (videoTrack.length) {
// NOTE 1 track per media type is assumed
videoTrack[0]._setVideoType(videoType);
}
};
/**
* Handles remote track mute / unmute events.
* @param {string} endpointId the track owner's identifier (MUC nickname)
* @param {MediaType} mediaType "audio" or "video"
* @param {boolean} isMuted the new mute state
* @private
*/
TraceablePeerConnection.prototype._peerMutedChanged = function(
endpointId,
mediaType,
isMuted) {
// Check if endpointId has a value to avoid acting on all remote tracks
if (!endpointId) {
logger.error(`${this} On peerMuteChanged - no endpoint ID`);
return;
}
const track = this.getRemoteTracks(endpointId, mediaType);
if (track.length) {
// NOTE 1 track per media type is assumed
track[0].setMute(isMuted);
}
};
/**
* Obtains audio levels of the remote audio tracks by getting the source information on the RTCRtpReceivers.
 * The information relevant to the ssrc is updated each time an RTP packet containing the ssrc is received.
* @param {Array<string>} speakerList list of endpoint ids for which audio levels are to be gathered.
* @returns {Object} containing ssrc and audio level information as a key-value pair.
*/
TraceablePeerConnection.prototype.getAudioLevels = function(speakerList = []) {
const audioLevels = {};
const audioReceivers = speakerList.length
? this._getReceiversByEndpointIds(speakerList, MediaType.AUDIO)
: this.peerconnection.getReceivers()
.filter(receiver => receiver.track && receiver.track.kind === MediaType.AUDIO && receiver.track.enabled);
audioReceivers.forEach(remote => {
const ssrc = remote.getSynchronizationSources();
if (ssrc && ssrc.length) {
// As per spec, this audiolevel is a value between 0..1 (linear), where 1.0
// represents 0 dBov, 0 represents silence, and 0.5 represents approximately
// 6 dBSPL change in the sound pressure level from 0 dBov.
// https://www.w3.org/TR/webrtc/#dom-rtcrtpcontributingsource-audiolevel
audioLevels[ssrc[0].source] = ssrc[0].audioLevel;
}
});
return audioLevels;
};
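/*
 * Shape sketch of the returned object: keys are synchronization source ids,
 * values are linear audio levels in the 0..1 range (values are made up):
 *
 *     { '2412058312': 0.031, '3912744586': 0 }
 */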
/**
* Obtains local tracks for given {@link MediaType}. If the <tt>mediaType</tt>
* argument is omitted the list of all local tracks will be returned.
* @param {MediaType} [mediaType]
* @return {Array<JitsiLocalTrack>}
*/
TraceablePeerConnection.prototype.getLocalTracks = function(mediaType) {
let tracks = Array.from(this.localTracks.values());
if (mediaType !== undefined) {
tracks = tracks.filter(track => track.getType() === mediaType);
}
return tracks;
};
/**
* Retrieves the local video track.
*
* @returns {JitsiLocalTrack|undefined} - local video track.
*/
TraceablePeerConnection.prototype.getLocalVideoTrack = function() {
return this.getLocalTracks(MediaType.VIDEO)[0];
};
/**
* Checks whether or not this {@link TraceablePeerConnection} instance contains
* any local tracks for given <tt>mediaType</tt>.
* @param {MediaType} mediaType
* @return {boolean}
*/
TraceablePeerConnection.prototype.hasAnyTracksOfType = function(mediaType) {
if (!mediaType) {
throw new Error('"mediaType" is required');
}
return this.getLocalTracks(mediaType).length > 0;
};
/**
* Obtains all remote tracks currently known to this PeerConnection instance.
* @param {string} [endpointId] the track owner's identifier (MUC nickname)
* @param {MediaType} [mediaType] the remote tracks will be filtered
* by their media type if this argument is specified.
* @return {Array<JitsiRemoteTrack>}
*/
TraceablePeerConnection.prototype.getRemoteTracks = function(
endpointId,
mediaType) {
const remoteTracks = [];
const endpoints
= endpointId ? [ endpointId ] : this.remoteTracks.keys();
for (const endpoint of endpoints) {
const endpointTrackMap = this.remoteTracks.get(endpoint);
if (!endpointTrackMap) {
// Otherwise an empty Map() would have to be allocated above
// eslint-disable-next-line no-continue
continue;
}
for (const trackMediaType of endpointTrackMap.keys()) {
// per media type filtering
if (!mediaType || mediaType === trackMediaType) {
const mediaTrack = endpointTrackMap.get(trackMediaType);
if (mediaTrack) {
remoteTracks.push(mediaTrack);
}
}
}
}
return remoteTracks;
};
/**
* Parses the remote description and returns the sdp lines of the sources associated with a remote participant.
*
* @param {string} id Endpoint id of the remote participant.
* @returns {Array<string>} The sdp lines that have the ssrc information.
*/
TraceablePeerConnection.prototype.getRemoteSourceInfoByParticipant = function(id) {
const removeSsrcInfo = [];
const remoteTracks = this.getRemoteTracks(id);
if (!remoteTracks?.length) {
return removeSsrcInfo;
}
const primarySsrcs = remoteTracks.map(track => track.getSSRC());
const sdp = new SDP(this.remoteDescription.sdp);
primarySsrcs.forEach((ssrc, idx) => {
for (const media of sdp.media) {
let lines = '';
let ssrcLines = SDPUtil.findLines(media, `a=ssrc:${ssrc}`);
if (ssrcLines.length) {
if (!removeSsrcInfo[idx]) {
removeSsrcInfo[idx] = '';
}
// Check if there are any FID groups present for the primary ssrc.
const fidLines = SDPUtil.findLines(media, `a=ssrc-group:FID ${ssrc}`);
if (fidLines.length) {
const secondarySsrc = fidLines[0].split(' ')[2];
lines += `${fidLines[0]}\r\n`;
ssrcLines = ssrcLines.concat(SDPUtil.findLines(media, `a=ssrc:${secondarySsrc}`));
}
removeSsrcInfo[idx] += `${ssrcLines.join('\r\n')}\r\n`;
removeSsrcInfo[idx] += lines;
}
}
});
return removeSsrcInfo;
};
/**
* Returns the target bitrates configured for the local video source.
*
* @returns {Object}
*/
TraceablePeerConnection.prototype.getTargetVideoBitrates = function() {
const currentCodec = this.getConfiguredVideoCodec();
return this.tpcUtils.videoBitrates[currentCodec.toUpperCase()] || this.tpcUtils.videoBitrates;
};
/**
* Tries to find {@link JitsiTrack} for given SSRC number. It will search both
* local and remote tracks bound to this instance.
* @param {number} ssrc
* @return {JitsiTrack|null}
*/
TraceablePeerConnection.prototype.getTrackBySSRC = function(ssrc) {
if (typeof ssrc !== 'number') {
throw new Error(`SSRC ${ssrc} is not a number`);
}
for (const localTrack of this.localTracks.values()) {
if (this.getLocalSSRC(localTrack) === ssrc) {
return localTrack;
}
}
for (const remoteTrack of this.getRemoteTracks()) {
if (remoteTrack.getSSRC() === ssrc) {
return remoteTrack;
}
}
return null;
};
/**
* Tries to find SSRC number for given {@link JitsiTrack} id. It will search
* both local and remote tracks bound to this instance.
* @param {string} id
* @return {number|null}
*/
TraceablePeerConnection.prototype.getSsrcByTrackId = function(id) {
const findTrackById = track => track.getTrack().id === id;
const localTrack = this.getLocalTracks().find(findTrackById);
if (localTrack) {
return this.getLocalSSRC(localTrack);
}
const remoteTrack = this.getRemoteTracks().find(findTrackById);
if (remoteTrack) {
return remoteTrack.getSSRC();
}
return null;
};
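// Illustrative usage sketch (editorial addition) for the two lookup helpers above. The SSRC
// number is a placeholder:
//
//     const track = tpc.getTrackBySSRC(123456);                        // JitsiTrack or null
//     const ssrc = track && tpc.getSsrcByTrackId(track.getTrack().id); // number or null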
/**
* Called when new remote MediaStream is added to the PeerConnection.
* @param {MediaStream} stream the WebRTC MediaStream for remote participant
*/
TraceablePeerConnection.prototype._remoteStreamAdded = function(stream) {
const streamId = RTC.getStreamID(stream);
if (!RTC.isUserStreamById(streamId)) {
logger.info(`${this} ignored remote 'stream added' event for non-user stream[id=${streamId}]`);
return;
}
// Bind 'addtrack'/'removetrack' event handlers
if (browser.isChromiumBased()) {
stream.onaddtrack = event => {
this._remoteTrackAdded(stream, event.track);
};
stream.onremovetrack = event => {
this._remoteTrackRemoved(stream, event.track);
};
}
// Call remoteTrackAdded for each track in the stream
const streamAudioTracks = stream.getAudioTracks();
for (const audioTrack of streamAudioTracks) {
this._remoteTrackAdded(stream, audioTrack);
}
const streamVideoTracks = stream.getVideoTracks();
for (const videoTrack of streamVideoTracks) {
this._remoteTrackAdded(stream, videoTrack);
}
};
/**
* Called on "track added" and "stream added" PeerConnection events (because we
 * handle streams on a per-track basis). Finds the owner and the SSRC for
* the track and passes that to ChatRoom for further processing.
* @param {MediaStream} stream the WebRTC MediaStream instance which is
* the parent of the track
* @param {MediaStreamTrack} track the WebRTC MediaStreamTrack added for remote
* participant.
* @param {RTCRtpTransceiver} transceiver the WebRTC transceiver that is created
* for the remote participant in unified plan.
*/
TraceablePeerConnection.prototype._remoteTrackAdded = function(stream, track, transceiver = null) {
const streamId = RTC.getStreamID(stream);
const mediaType = track.kind;
if (!this.isP2P && !RTC.isUserStreamById(streamId)) {
logger.info(`${this} ignored remote 'stream added' event for non-user stream[id=${streamId}]`);
return;
}
logger.info(`${this} adding remote track for stream[id=${streamId},type=${mediaType}]`);
// look up an associated JID for a stream id
if (!mediaType) {
GlobalOnErrorHandler.callErrorHandler(
new Error(
`MediaType undefined for remote track, stream id: ${streamId}`
));
// Abort
return;
}
const remoteSDP = this._usesUnifiedPlan
? new SDP(this.peerconnection.remoteDescription.sdp)
: new SDP(this.remoteDescription.sdp);
let mediaLines;
    // In unified plan mode, find the matching mline using 'mid' if it's available, otherwise use the
// 'msid' attribute of the stream.
if (this._usesUnifiedPlan) {
if (transceiver && transceiver.mid) {
const mid = transceiver.mid;
mediaLines = remoteSDP.media.filter(mls => SDPUtil.findLine(mls, `a=mid:${mid}`));
} else {
mediaLines = remoteSDP.media.filter(mls => {
const msid = SDPUtil.findLine(mls, 'a=msid:');
return typeof msid !== 'undefined' && streamId === msid.substring(7).split(' ')[0];
});
}
} else {
mediaLines = remoteSDP.media.filter(mls => mls.startsWith(`m=${mediaType}`));
}
if (!mediaLines.length) {
GlobalOnErrorHandler.callErrorHandler(
new Error(`No media lines found in remote SDP for remote stream[id=${streamId},type=${mediaType}]`));
// Abort
return;
}
let ssrcLines = SDPUtil.findLines(mediaLines[0], 'a=ssrc:');
ssrcLines
= ssrcLines.filter(line => line.indexOf(`msid:${streamId}`) !== -1);
if (!ssrcLines.length) {
GlobalOnErrorHandler.callErrorHandler(
new Error(`No SSRC lines found in remote SDP for remote stream[msid=${streamId},type=${mediaType}]`));
// Abort
return;
}
// FIXME the length of ssrcLines[0] not verified, but it will fail
// with global error handler anyway
const ssrcStr = ssrcLines[0].substring(7).split(' ')[0];
const trackSsrc = Number(ssrcStr);
const ownerEndpointId = this.signalingLayer.getSSRCOwner(trackSsrc);
if (isNaN(trackSsrc) || trackSsrc < 0) {
GlobalOnErrorHandler.callErrorHandler(
new Error(
`Invalid SSRC for remote stream[ssrc=${trackSsrc},id=${streamId},type=${mediaType}]`));
// Abort
return;
} else if (!ownerEndpointId) {
GlobalOnErrorHandler.callErrorHandler(
new Error(
`No SSRC owner known for remote stream[ssrc=${trackSsrc},id=${streamId},type=${mediaType}]`));
// Abort
return;
}
let sourceName;
if (FeatureFlags.isSourceNameSignalingEnabled()) {
sourceName = this.signalingLayer.getTrackSourceName(trackSsrc);
        // If the source name was not signaled, we'll generate one, which allows testing signaling
        // when mixing legacy (mobile) clients with new clients.
if (!sourceName) {
sourceName = getSourceNameForJitsiTrack(ownerEndpointId, mediaType, 0);
}
}
// eslint-disable-next-line no-undef
logger.info(`${this} creating remote track[endpoint=${ownerEndpointId},ssrc=${trackSsrc},`
+ `type=${mediaType},sourceName=${sourceName}]`);
const peerMediaInfo
= this.signalingLayer.getPeerMediaInfo(ownerEndpointId, mediaType);
if (!peerMediaInfo) {
GlobalOnErrorHandler.callErrorHandler(
new Error(`${this}: no peer media info available for ${ownerEndpointId}`));
return;
}
const muted = peerMediaInfo.muted;
const videoType = peerMediaInfo.videoType; // can be undefined
// eslint-disable-next-line no-undef
this._createRemoteTrack(
ownerEndpointId, stream, track, mediaType, videoType, trackSsrc, muted, sourceName);
};
// FIXME cleanup params
/* eslint-disable max-params */
/**
* Initializes a new JitsiRemoteTrack instance with the data provided by
* the signaling layer and SDP.
*
* @param {string} ownerEndpointId the owner's endpoint ID (MUC nickname)
* @param {MediaStream} stream the WebRTC stream instance
* @param {MediaStreamTrack} track the WebRTC track instance
 * @param {MediaType} mediaType the track's media type
 * @param {VideoType} [videoType] the track's video type (if applicable)
* @param {number} ssrc the track's main SSRC number
* @param {boolean} muted the initial muted status
* @param {String} sourceName the track's source name
*/
TraceablePeerConnection.prototype._createRemoteTrack = function(
ownerEndpointId,
stream,
track,
mediaType,
videoType,
ssrc,
muted,
sourceName) {
let remoteTracksMap = this.remoteTracks.get(ownerEndpointId);
if (!remoteTracksMap) {
remoteTracksMap = new Map();
this.remoteTracks.set(ownerEndpointId, remoteTracksMap);
}
const existingTrack = remoteTracksMap.get(mediaType);
if (existingTrack && existingTrack.getTrack() === track) {
// Ignore duplicated event which can originate either from 'onStreamAdded' or 'onTrackAdded'.
logger.info(`${this} ignored duplicated track event for track[endpoint=${ownerEndpointId},type=${mediaType}]`);
return;
} else if (existingTrack) {
        logger.error(`${this} received a second remote track for track[endpoint=${ownerEndpointId},type=${mediaType}]`
            + ', deleting the existing track');
        // The existing track needs to be removed here. We can get here when Jicofo reverses the order of source-add
// and source-remove messages. Ideally, when a remote endpoint changes source, like switching devices, it sends
// a source-remove (for old ssrc) followed by a source-add (for new ssrc) and Jicofo then should forward these
// two messages to all the other endpoints in the conference in the same order. However, sometimes, these
// messages arrive at the client in the reverse order resulting in two remote tracks (of same media type) being
// created and in case of video, a black strip (that of the first track which has ended) appears over the live
// track obscuring it. Removing the existing track when that happens will fix this issue.
this._remoteTrackRemoved(existingTrack.getOriginalStream(), existingTrack.getTrack());
}
const remoteTrack
= new JitsiRemoteTrack(
this.rtc,
this.rtc.conference,
ownerEndpointId,
stream,
track,
mediaType,
videoType,
ssrc,
muted,
this.isP2P,
sourceName);
remoteTracksMap.set(mediaType, remoteTrack);
this.eventEmitter.emit(RTCEvents.REMOTE_TRACK_ADDED, remoteTrack, this);
};
/* eslint-enable max-params */
/**
* Handles remote stream removal.
* @param stream the WebRTC MediaStream object which is being removed from the
* PeerConnection
*/
TraceablePeerConnection.prototype._remoteStreamRemoved = function(stream) {
if (!RTC.isUserStream(stream)) {
const id = RTC.getStreamID(stream);
logger.info(`Ignored remote 'stream removed' event for stream[id=${id}]`);
return;
}
// Call remoteTrackRemoved for each track in the stream
const streamVideoTracks = stream.getVideoTracks();
for (const videoTrack of streamVideoTracks) {
this._remoteTrackRemoved(stream, videoTrack);
}
const streamAudioTracks = stream.getAudioTracks();
for (const audioTrack of streamAudioTracks) {
this._remoteTrackRemoved(stream, audioTrack);
}
};
/**
* Handles remote media track removal.
* @param {MediaStream} stream WebRTC MediaStream instance which is the parent
* of the track.
* @param {MediaStreamTrack} track the WebRTC MediaStreamTrack which has been
* removed from the PeerConnection.
*/
TraceablePeerConnection.prototype._remoteTrackRemoved = function(
stream,
track) {
const streamId = RTC.getStreamID(stream);
const trackId = track && RTC.getTrackID(track);
if (!RTC.isUserStreamById(streamId)) {
logger.info(`${this} ignored remote 'stream removed' event for non-user stream[id=${streamId}]`);
return;
}
logger.info(`${this} remote track removed stream[id=${streamId},trackId=${trackId}]`);
if (!streamId) {
GlobalOnErrorHandler.callErrorHandler(new Error(`${this} remote track removal failed - no stream ID`));
return;
}
if (!trackId) {
GlobalOnErrorHandler.callErrorHandler(new Error(`${this} remote track removal failed - no track ID`));
return;
}
if (!this._removeRemoteTrackById(streamId, trackId)) {
// NOTE this warning is always printed when user leaves the room,
// because we remove remote tracks manually on MUC member left event,
// before the SSRCs are removed by Jicofo. In most cases it is fine to
// ignore this warning, but still it's better to keep it printed for
// debugging purposes.
//
// We could change the behaviour to emit track removed only from here,
// but the order of the events will change and consuming apps could
// behave unexpectedly (the "user left" event would come before "track
// removed" events).
logger.warn(`${this} Removed track not found for stream[id=${streamId},trackId=${trackId}]`);
}
};
/**
 * Finds a remote track by its stream and track ids.
 * @param {string} streamId the media stream id as defined by WebRTC
 * @param {string} trackId the media track id as defined by WebRTC
* @return {JitsiRemoteTrack|undefined} the track's instance or
* <tt>undefined</tt> if not found.
* @private
*/
TraceablePeerConnection.prototype._getRemoteTrackById = function(
streamId,
trackId) {
    // Return as soon as the first match is found.
for (const endpointTrackMap of this.remoteTracks.values()) {
for (const mediaTrack of endpointTrackMap.values()) {
// FIXME verify and try to use ===
/* eslint-disable eqeqeq */
if (mediaTrack.getStreamId() == streamId
&& mediaTrack.getTrackId() == trackId) {
return mediaTrack;
}
/* eslint-enable eqeqeq */
}
}
return undefined;
};
/**
 * Removes all JitsiRemoteTracks associated with a given MUC nickname
 * (resource part of the JID). Returns an array of the removed tracks.
*
* @param {string} owner - The resource part of the MUC JID.
* @returns {JitsiRemoteTrack[]}
*/
TraceablePeerConnection.prototype.removeRemoteTracks = function(owner) {
const removedTracks = [];
const remoteTracksMap = this.remoteTracks.get(owner);
if (remoteTracksMap) {
const removedAudioTrack = remoteTracksMap.get(MediaType.AUDIO);
const removedVideoTrack = remoteTracksMap.get(MediaType.VIDEO);
removedAudioTrack && removedTracks.push(removedAudioTrack);
removedVideoTrack && removedTracks.push(removedVideoTrack);
this.remoteTracks.delete(owner);
}
    logger.debug(`${this} removed remote tracks[endpoint=${owner},count=${removedTracks.length}]`);
return removedTracks;
};
/**
* Removes and disposes given <tt>JitsiRemoteTrack</tt> instance. Emits
* {@link RTCEvents.REMOTE_TRACK_REMOVED}.
* @param {JitsiRemoteTrack} toBeRemoved
*/
TraceablePeerConnection.prototype._removeRemoteTrack = function(toBeRemoved) {
toBeRemoved.dispose();
const participantId = toBeRemoved.getParticipantId();
const remoteTracksMap = this.remoteTracks.get(participantId);
if (!remoteTracksMap) {
logger.error(`${this} removeRemoteTrack: no remote tracks map for endpoint=${participantId}`);
} else if (!remoteTracksMap.delete(toBeRemoved.getType())) {
logger.error(`${this} Failed to remove ${toBeRemoved} - type mapping messed up ?`);
}
this.eventEmitter.emit(RTCEvents.REMOTE_TRACK_REMOVED, toBeRemoved);
};
/**
* Removes and disposes <tt>JitsiRemoteTrack</tt> identified by given stream and
* track ids.
*
* @param {string} streamId the media stream id as defined by the WebRTC
* @param {string} trackId the media track id as defined by the WebRTC
* @returns {JitsiRemoteTrack|undefined} the track which has been removed or
* <tt>undefined</tt> if no track matching given stream and track ids was
* found.
*/
TraceablePeerConnection.prototype._removeRemoteTrackById = function(
streamId,
trackId) {
const toBeRemoved = this._getRemoteTrackById(streamId, trackId);
if (toBeRemoved) {
this._removeRemoteTrack(toBeRemoved);
}
return toBeRemoved;
};
/**
* Returns a map with keys msid/mediaType and <tt>TrackSSRCInfo</tt> values.
* @param {RTCSessionDescription} desc the local description.
* @return {Map<string,TrackSSRCInfo>}
*/
TraceablePeerConnection.prototype._extractSSRCMap = function(desc) {
/**
     * Track SSRC infos mapped by stream ID (msid) or mediaType (unified-plan)
* @type {Map<string,TrackSSRCInfo>}
*/
const ssrcMap = new Map();
/**
* Groups mapped by primary SSRC number
* @type {Map<number,Array<SSRCGroupInfo>>}
*/
const groupsMap = new Map();
if (typeof desc !== 'object' || desc === null
|| typeof desc.sdp !== 'string') {
logger.warn('An empty description was passed as an argument');
return ssrcMap;
}
const session = transform.parse(desc.sdp);
if (!Array.isArray(session.media)) {
return ssrcMap;
}
let media = session.media;
// For unified plan clients, only the first audio and video mlines will have ssrcs for the local sources.
// The rest of the m-lines are for the recv-only sources, one for each remote source.
if (this._usesUnifiedPlan) {
media = [];
[ MediaType.AUDIO, MediaType.VIDEO ].forEach(mediaType => {
const mLine = session.media.find(m => m.type === mediaType);
mLine && media.push(mLine);
});
}
for (const mLine of media) {
if (!Array.isArray(mLine.ssrcs)) {
continue; // eslint-disable-line no-continue
}
if (Array.isArray(mLine.ssrcGroups)) {
for (const group of mLine.ssrcGroups) {
if (typeof group.semantics !== 'undefined'
&& typeof group.ssrcs !== 'undefined') {
// Parse SSRCs and store as numbers
const groupSSRCs = group.ssrcs.split(' ').map(ssrcStr => parseInt(ssrcStr, 10));
const primarySSRC = groupSSRCs[0];
// Note that group.semantics is already present
group.ssrcs = groupSSRCs;
// eslint-disable-next-line max-depth
if (!groupsMap.has(primarySSRC)) {
groupsMap.set(primarySSRC, []);
}
groupsMap.get(primarySSRC).push(group);
}
}
}
let ssrcs = mLine.ssrcs;
// Filter the ssrcs with 'msid' attribute for plan-b clients and 'cname' for unified-plan clients.
ssrcs = this._usesUnifiedPlan
? ssrcs.filter(s => s.attribute === 'cname')
: ssrcs.filter(s => s.attribute === 'msid');
for (const ssrc of ssrcs) {
// Use the mediaType as key for the source map for unified plan clients since msids are not part of
// the standard and the unified plan SDPs do not have a proper msid attribute for the sources.
// Also the ssrcs for sources do not change for Unified plan clients since RTCRtpSender#replaceTrack is
// used for switching the tracks so it is safe to use the mediaType as the key for the TrackSSRCInfo map.
const key = this._usesUnifiedPlan ? mLine.type : ssrc.value;
const ssrcNumber = ssrc.id;
let ssrcInfo = ssrcMap.get(key);
if (!ssrcInfo) {
ssrcInfo = {
ssrcs: [],
groups: [],
msid: key
};
ssrcMap.set(key, ssrcInfo);
}
ssrcInfo.ssrcs.push(ssrcNumber);
if (groupsMap.has(ssrcNumber)) {
const ssrcGroups = groupsMap.get(ssrcNumber);
for (const group of ssrcGroups) {
ssrcInfo.groups.push(group);
}
}
}
}
return ssrcMap;
};
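// Illustrative shape of the map produced above (editorial addition; the SSRC numbers are
// placeholders). For unified-plan clients the key is the media type, for plan-b it is the
// stream id (msid):
//
//     Map {
//         'video' => {
//             ssrcs: [ 111111, 222222 ],
//             groups: [ { semantics: 'FID', ssrcs: [ 111111, 222222 ] } ],
//             msid: 'video'
//         }
//     }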
/**
* Takes a SessionDescription object and returns a "normalized" version.
* Currently it takes care of ordering the a=ssrc lines and denoting receive
* only SSRCs.
*/
const normalizePlanB = function(desc) {
if (typeof desc !== 'object' || desc === null
|| typeof desc.sdp !== 'string') {
logger.warn('An empty description was passed as an argument');
return desc;
}
// eslint-disable-next-line no-shadow
const transform = require('sdp-transform');
const session = transform.parse(desc.sdp);
if (typeof session !== 'undefined'
&& typeof session.media !== 'undefined'
&& Array.isArray(session.media)) {
session.media.forEach(mLine => {
// Chrome appears to be picky about the order in which a=ssrc lines
// are listed in an m-line when rtx is enabled (and thus there are
// a=ssrc-group lines with FID semantics). Specifically if we have
// "a=ssrc-group:FID S1 S2" and the "a=ssrc:S2" lines appear before
// the "a=ssrc:S1" lines, SRD fails.
// So, put SSRC which appear as the first SSRC in an FID ssrc-group
// first.
const firstSsrcs = [];
const newSsrcLines = [];
if (typeof mLine.ssrcGroups !== 'undefined'
&& Array.isArray(mLine.ssrcGroups)) {
mLine.ssrcGroups.forEach(group => {
if (typeof group.semantics !== 'undefined'
&& group.semantics === 'FID') {
if (typeof group.ssrcs !== 'undefined') {
firstSsrcs.push(Number(group.ssrcs.split(' ')[0]));
}
}
});
}
if (Array.isArray(mLine.ssrcs)) {
let i;
for (i = 0; i < mLine.ssrcs.length; i++) {
if (typeof mLine.ssrcs[i] === 'object'
&& typeof mLine.ssrcs[i].id !== 'undefined'
&& firstSsrcs.indexOf(mLine.ssrcs[i].id) >= 0) {
newSsrcLines.push(mLine.ssrcs[i]);
delete mLine.ssrcs[i];
}
}
for (i = 0; i < mLine.ssrcs.length; i++) {
if (typeof mLine.ssrcs[i] !== 'undefined') {
newSsrcLines.push(mLine.ssrcs[i]);
}
}
mLine.ssrcs = replaceDefaultUnifiedPlanMsid(newSsrcLines);
}
});
}
const resStr = transform.write(session);
return new RTCSessionDescription({
type: desc.type,
sdp: resStr
});
};
/**
* Unified plan differentiates a remote track not associated with a stream using
* the msid "-", which can incorrectly trigger an onaddstream event in plan-b.
* For jitsi, these tracks are actually receive-only ssrcs. To prevent
* onaddstream from firing, remove the ssrcs with msid "-" except the cname
* line. Normally the ssrcs are not used by the client, as the bridge controls
* media flow, but keep one reference to the ssrc for the p2p case.
*
* @param {Array<Object>} ssrcLines - The ssrc lines from a remote description.
* @private
* @returns {Array<Object>} ssrcLines with removed lines referencing msid "-".
*/
function replaceDefaultUnifiedPlanMsid(ssrcLines = []) {
if (!browser.isChrome() || !browser.isVersionGreaterThan(70)) {
return ssrcLines;
}
let filteredLines = [ ...ssrcLines ];
const problematicSsrcIds = ssrcLines.filter(ssrcLine =>
ssrcLine.attribute === 'mslabel' && ssrcLine.value === '-')
.map(ssrcLine => ssrcLine.id);
problematicSsrcIds.forEach(ssrcId => {
// Find the cname which is to be modified and left in.
const cnameLine = filteredLines.find(line =>
line.id === ssrcId && line.attribute === 'cname');
cnameLine.value = `${MediaDirection.RECVONLY}-${ssrcId}`;
// Remove all of lines for the ssrc.
filteredLines
= filteredLines.filter(line => line.id !== ssrcId);
// But re-add the cname line so there is a reference kept to the ssrc
// in the SDP.
filteredLines.push(cnameLine);
});
return filteredLines;
}
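// Illustrative before/after sketch (editorial addition; ssrc id and cname are placeholders)
// for the Chrome-specific path above. Input ssrc lines of a receive-only source signaled
// with msid "-":
//
//     [ { id: 333, attribute: 'cname', value: 'abcd' },
//       { id: 333, attribute: 'msid', value: '- -' },
//       { id: 333, attribute: 'mslabel', value: '-' } ]
//
// Output keeps a single, renamed cname line so a reference to the ssrc stays in the SDP:
//
//     [ { id: 333, attribute: 'cname', value: 'recvonly-333' } ]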
/**
* Makes sure that both audio and video directions are configured as 'sendrecv'.
* @param {Object} localDescription the SDP object as defined by WebRTC.
 * @param {object} options <tt>TraceablePeerConnection</tt> config options.
*/
const enforceSendRecv = function(localDescription, options) {
if (!localDescription) {
throw new Error('No local description passed in.');
}
const transformer = new SdpTransformWrap(localDescription.sdp);
const audioMedia = transformer.selectMedia(MediaType.AUDIO);
let changed = false;
if (audioMedia && audioMedia.direction !== MediaDirection.SENDRECV) {
if (options.startSilent) {
audioMedia.direction = MediaDirection.INACTIVE;
} else {
audioMedia.direction = MediaDirection.SENDRECV;
}
changed = true;
}
const videoMedia = transformer.selectMedia(MediaType.VIDEO);
if (videoMedia && videoMedia.direction !== MediaDirection.SENDRECV) {
videoMedia.direction = MediaDirection.SENDRECV;
changed = true;
}
if (changed) {
return new RTCSessionDescription({
type: localDescription.type,
sdp: transformer.toRawSDP()
});
}
return localDescription;
};
/**
*
* @param {JitsiLocalTrack} localTrack
*/
TraceablePeerConnection.prototype.getLocalSSRC = function(localTrack) {
const ssrcInfo = this._getSSRC(localTrack.rtcId);
return ssrcInfo && ssrcInfo.ssrcs[0];
};
/**
* When doing unified plan simulcast, we'll have a set of ssrcs with the
* same msid but no ssrc-group, since unified plan signals the simulcast
* group via the a=simulcast line. Unfortunately, Jicofo will complain
* if it sees ssrcs with matching msids but no ssrc-group, so we'll inject
* an ssrc-group line to make Jicofo happy.
* @param desc A session description object (with 'type' and 'sdp' fields)
* @return A session description object with its sdp field modified to
 * contain an injected ssrc-group for simulcast
*/
TraceablePeerConnection.prototype._injectSsrcGroupForUnifiedSimulcast
= function(desc) {
const sdp = transform.parse(desc.sdp);
const video = sdp.media.find(mline => mline.type === 'video');
// Check if the browser supports RTX, add only the primary ssrcs to the SIM group if that is the case.
video.ssrcGroups = video.ssrcGroups || [];
const fidGroups = video.ssrcGroups.filter(group => group.semantics === 'FID');
if (video.simulcast || video.simulcast_03) {
const ssrcs = [];
if (fidGroups && fidGroups.length) {
fidGroups.forEach(group => {
ssrcs.push(group.ssrcs.split(' ')[0]);
});
} else {
video.ssrcs.forEach(ssrc => {
if (ssrc.attribute === 'msid') {
ssrcs.push(ssrc.id);
}
});
}
if (video.ssrcGroups.find(group => group.semantics === 'SIM')) {
// Group already exists, no need to do anything
return desc;
}
video.ssrcGroups.push({
semantics: 'SIM',
ssrcs: ssrcs.join(' ')
});
}
return new RTCSessionDescription({
type: desc.type,
sdp: transform.write(sdp)
});
};
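// Illustrative effect sketch (editorial addition; SSRC numbers are placeholders). Given a
// unified-plan simulcast video m-line that only carries FID groups, e.g.
//
//     a=ssrc-group:FID 111 112
//     a=ssrc-group:FID 221 222
//     a=ssrc-group:FID 331 332
//
// the transform above injects the SIM group that Jicofo expects:
//
//     a=ssrc-group:SIM 111 221 331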
/* eslint-disable-next-line vars-on-top */
const getters = {
signalingState() {
return this.peerconnection.signalingState;
},
iceConnectionState() {
return this.peerconnection.iceConnectionState;
},
connectionState() {
return this.peerconnection.connectionState;
},
localDescription() {
let desc = this.peerconnection.localDescription;
if (!desc) {
logger.debug(`${this} getLocalDescription no localDescription found`);
return {};
}
this.trace('getLocalDescription::preTransform', dumpSDP(desc));
// If the browser is running in unified plan mode and this is a jvb connection,
// transform the SDP to Plan B first.
if (this._usesUnifiedPlan && !this.isP2P) {
desc = this.interop.toPlanB(desc);
this.trace('getLocalDescription::postTransform (Plan B)',
dumpSDP(desc));
desc = this._injectSsrcGroupForUnifiedSimulcast(desc);
this.trace('getLocalDescription::postTransform (inject ssrc group)',
dumpSDP(desc));
} else if (!this._usesUnifiedPlan) {
if (browser.doesVideoMuteByStreamRemove()) {
desc = this.localSdpMunger.maybeAddMutedLocalVideoTracksToSDP(desc);
logger.debug(
'getLocalDescription::postTransform (munge local SDP)', desc);
}
// What comes out of this getter will be signalled over Jingle to
// the other peer, so we need to make sure the media direction is
// 'sendrecv' because we won't change the direction later and don't want
// the other peer to think we can't send or receive.
//
// Note that the description we set in chrome does have the accurate
// direction (e.g. 'recvonly'), since that is technically what is
// happening (check setLocalDescription impl).
desc = enforceSendRecv(desc, this.options);
}
// See the method's doc for more info about this transformation.
desc = this.localSdpMunger.transformStreamIdentifiers(desc);
return desc;
},
remoteDescription() {
let desc = this.peerconnection.remoteDescription;
if (!desc) {
logger.debug(`${this} getRemoteDescription no remoteDescription found`);
return {};
}
this.trace('getRemoteDescription::preTransform', dumpSDP(desc));
if (this._usesUnifiedPlan) {
if (this.isP2P) {
// Adjust the media direction for p2p based on whether a local source has been added.
desc = this._adjustRemoteMediaDirection(desc);
} else {
// If this is a jvb connection, transform the SDP to Plan B first.
desc = this.interop.toPlanB(desc);
this.trace('getRemoteDescription::postTransform (Plan B)', dumpSDP(desc));
}
}
return desc;
}
};
Object.keys(getters).forEach(prop => {
Object.defineProperty(
TraceablePeerConnection.prototype,
prop, {
get: getters[prop]
}
);
});
TraceablePeerConnection.prototype._getSSRC = function(rtcId) {
return this.localSSRCs.get(rtcId);
};
/**
* Checks if low fps screensharing is in progress.
*
* @private
* @returns {boolean} Returns true if 5 fps screensharing is in progress, false otherwise.
*/
TraceablePeerConnection.prototype.isSharingLowFpsScreen = function() {
return this._isSharingScreen() && this._capScreenshareBitrate;
};
/**
* Checks if screensharing is in progress.
*
* @returns {boolean} Returns true if a desktop track has been added to the
* peerconnection, false otherwise.
*/
TraceablePeerConnection.prototype._isSharingScreen = function() {
const track = this.getLocalVideoTrack();
return track && track.videoType === VideoType.DESKTOP;
};
/**
* Munges the order of the codecs in the SDP passed based on the preference
* set through config.js settings. All instances of the specified codec are
* moved up to the top of the list when it is preferred. The specified codec
* is deleted from the list if the configuration specifies that the codec be
* disabled.
* @param {RTCSessionDescription} description that needs to be munged.
* @returns {RTCSessionDescription} the munged description.
*/
TraceablePeerConnection.prototype._mungeCodecOrder = function(description) {
if (!this.codecPreference) {
return description;
}
const parsedSdp = transform.parse(description.sdp);
// Only the m-line that defines the source the browser will be sending should need to change.
// This is typically the first m-line with the matching media type.
const mLine = parsedSdp.media.find(m => m.type === this.codecPreference.mediaType);
if (!mLine) {
return description;
}
if (this.codecPreference.enable) {
SDPUtil.preferCodec(mLine, this.codecPreference.mimeType);
// Strip the high profile H264 codecs on mobile clients for p2p connection.
// High profile codecs give better quality at the expense of higher load which
// we do not want on mobile clients.
        // Jicofo offers only the baseline profile for the jvb connection.
// TODO - add check for mobile browsers once js-utils provides that check.
if (this.codecPreference.mimeType === CodecMimeType.H264 && browser.isReactNative() && this.isP2P) {
SDPUtil.stripCodec(mLine, this.codecPreference.mimeType, true /* high profile */);
}
// Set the max bitrate here on the SDP so that the configured max. bitrate is effective
// as soon as the browser switches to VP9.
if (this.codecPreference.mimeType === CodecMimeType.VP9
&& this.getConfiguredVideoCodec() === CodecMimeType.VP9) {
const bitrates = this.tpcUtils.videoBitrates.VP9 || this.tpcUtils.videoBitrates;
const hdBitrate = bitrates.high ? bitrates.high : HD_BITRATE;
const limit = Math.floor((this._isSharingScreen() ? HD_BITRATE : hdBitrate) / 1000);
// Use only the HD bitrate for now as there is no API available yet for configuring
// the bitrates on the individual SVC layers.
mLine.bandwidth = [ {
type: 'AS',
limit
} ];
} else {
// Clear the bandwidth limit in SDP when VP9 is no longer the preferred codec.
// This is needed on react native clients as react-native-webrtc returns the
// SDP that the application passed instead of returning the SDP off the native side.
// This line automatically gets cleared on web on every renegotiation.
mLine.bandwidth = undefined;
}
} else {
SDPUtil.stripCodec(mLine, this.codecPreference.mimeType);
}
return new RTCSessionDescription({
type: description.type,
sdp: transform.write(parsedSdp)
});
};
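// Illustrative effect sketch (editorial addition; payload numbers are placeholders). With a
// codec preference of { mediaType: 'video', mimeType: 'vp9', enable: true } the payload types
// of the first video m-line are reordered by SDPUtil.preferCodec so that VP9 comes first:
//
//     m=video 9 UDP/TLS/RTP/SAVPF 96 97 98 99  -->  m=video 9 UDP/TLS/RTP/SAVPF 98 99 96 97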
/**
* Checks if given track belongs to this peerconnection instance.
*
* @param {JitsiLocalTrack|JitsiRemoteTrack} track - The track to be checked.
* @returns {boolean}
*/
TraceablePeerConnection.prototype.containsTrack = function(track) {
if (track.isLocal()) {
return this.localTracks.has(track.rtcId);
}
const participantId = track.getParticipantId();
const remoteTracksMap = this.remoteTracks.get(participantId);
return Boolean(remoteTracksMap && remoteTracksMap.get(track.getType()) === track);
};
/**
* Add {@link JitsiLocalTrack} to this TPC.
* @param {JitsiLocalTrack} track
* @param {boolean} isInitiator indicates if the endpoint is the offerer.
* @returns {Promise<void>} - resolved when done.
*/
TraceablePeerConnection.prototype.addTrack = function(track, isInitiator = false) {
const rtcId = track.rtcId;
logger.info(`${this} adding ${track}`);
if (this.localTracks.has(rtcId)) {
return Promise.reject(new Error(`${track} is already in ${this}`));
}
this.localTracks.set(rtcId, track);
const webrtcStream = track.getOriginalStream();
if (this._usesUnifiedPlan) {
logger.debug(`${this} TPC.addTrack using unified plan`);
if (webrtcStream) {
try {
this.tpcUtils.addTrack(track, isInitiator);
} catch (error) {
logger.error(`${this} Adding track=${track} failed: ${error?.message}`);
return Promise.reject(error);
}
}
} else {
// Use addStream API for the plan-b case.
if (webrtcStream) {
this._addStream(webrtcStream);
// It's not ok for a track to not have a WebRTC stream if:
} else if (!browser.doesVideoMuteByStreamRemove()
|| track.isAudioTrack()
|| (track.isVideoTrack() && !track.isMuted())) {
return Promise.reject(new Error(`${this} no WebRTC stream for track=${track}`));
}
// Muted video tracks do not have WebRTC stream
if (browser.doesVideoMuteByStreamRemove() && track.isVideoTrack() && track.isMuted()) {
const ssrcInfo = this.generateNewStreamSSRCInfo(track);
this.sdpConsistency.setPrimarySsrc(ssrcInfo.ssrcs[0]);
const simGroup
= ssrcInfo.groups.find(groupInfo => groupInfo.semantics === 'SIM');
if (simGroup) {
this.simulcast.setSsrcCache(simGroup.ssrcs);
}
const fidGroups
= ssrcInfo.groups.filter(
groupInfo => groupInfo.semantics === 'FID');
if (fidGroups) {
const rtxSsrcMapping = new Map();
fidGroups.forEach(fidGroup => {
const primarySsrc = fidGroup.ssrcs[0];
const rtxSsrc = fidGroup.ssrcs[1];
rtxSsrcMapping.set(primarySsrc, rtxSsrc);
});
this.rtxModifier.setSsrcCache(rtxSsrcMapping);
}
}
}
let promiseChain = Promise.resolve();
// On Firefox, the encodings have to be configured on the sender only after the transceiver is created.
if (browser.isFirefox()) {
promiseChain = promiseChain.then(() => webrtcStream && this.tpcUtils.setEncodings(track));
}
return promiseChain;
};
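// Illustrative usage sketch (editorial addition). `localVideoTrack` is a placeholder
// JitsiLocalTrack obtained elsewhere (e.g. via JitsiMeetJS.createLocalTracks):
//
//     tpc.addTrack(localVideoTrack, /* isInitiator */ true)
//         .then(() => logger.debug('track added, renegotiation expected to follow'))
//         .catch(error => logger.error(`adding the track failed: ${error?.message}`));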
/**
* Adds local track as part of the unmute operation.
* @param {JitsiLocalTrack} track the track to be added as part of the unmute operation.
*
* @return {Promise<boolean>} Promise that resolves to true if the underlying PeerConnection's
* state has changed and renegotiation is required, false if no renegotiation is needed or
* Promise is rejected when something goes wrong.
*/
TraceablePeerConnection.prototype.addTrackUnmute = function(track) {
logger.info(`${this} Adding track=${track} as unmute`);
if (!this._assertTrackBelongs('addTrackUnmute', track)) {
// Abort
return Promise.reject('Track not found on the peerconnection');
}
const webRtcStream = track.getOriginalStream();
if (!webRtcStream) {
logger.error(`${this} Unable to add track=${track} as unmute - no WebRTC stream`);
return Promise.reject('Stream not found');
}
if (this._usesUnifiedPlan) {
return this.tpcUtils.replaceTrack(null, track).then(() => this.isP2P);
}
this._addStream(webRtcStream);
return Promise.resolve(true);
};
/**
* Adds WebRTC media stream to the underlying PeerConnection
* @param {MediaStream} mediaStream
* @private
*/
TraceablePeerConnection.prototype._addStream = function(mediaStream) {
this.peerconnection.addStream(mediaStream);
this._addedStreams.push(mediaStream);
};
/**
 * Removes WebRTC media stream from the underlying PeerConnection
* @param {MediaStream} mediaStream
*/
TraceablePeerConnection.prototype._removeStream = function(mediaStream) {
this.peerconnection.removeStream(mediaStream);
this._addedStreams
= this._addedStreams.filter(stream => stream !== mediaStream);
};
/**
 * Checks whether the given <tt>localTrack</tt> belongs to this TPC, i.e. whether it
 * has been previously added using {@link addTrack}. If the track does not belong,
 * an error message will be logged.
* @param {string} methodName the method name that will be logged in an error
* message
* @param {JitsiLocalTrack} localTrack
* @return {boolean} <tt>true</tt> if given local track belongs to this TPC or
* <tt>false</tt> otherwise.
* @private
*/
TraceablePeerConnection.prototype._assertTrackBelongs = function(
methodName,
localTrack) {
const doesBelong = this.localTracks.has(localTrack?.rtcId);
if (!doesBelong) {
logger.error(`${this} ${methodName}: track=${localTrack} does not belong to pc`);
}
return doesBelong;
};
/**
* Returns the codec that is configured on the client as the preferred video codec.
* This takes into account the current order of codecs in the local description sdp.
*
* @returns {CodecMimeType} The codec that is set as the preferred codec to receive
* video in the local SDP.
*/
TraceablePeerConnection.prototype.getConfiguredVideoCodec = function() {
const sdp = this.peerconnection.localDescription?.sdp;
const defaultCodec = CodecMimeType.VP8;
if (!sdp) {
return defaultCodec;
}
const parsedSdp = transform.parse(sdp);
const mLine = parsedSdp.media.find(m => m.type === MediaType.VIDEO);
const codec = mLine.rtp[0].codec;
if (codec) {
return Object.values(CodecMimeType).find(value => value === codec.toLowerCase());
}
return defaultCodec;
};
/**
* Enables or disables simulcast for screenshare based on the frame rate requested for desktop track capture.
*
* @param {number} maxFps framerate to be used for desktop track capture.
*/
TraceablePeerConnection.prototype.setDesktopSharingFrameRate = function(maxFps) {
const lowFps = maxFps <= SS_DEFAULT_FRAME_RATE;
this._capScreenshareBitrate = this.isSimulcastOn() && lowFps;
};
/**
* Sets the codec preference on the peerconnection. The codec preference goes into effect when
* the next renegotiation happens.
*
* @param {CodecMimeType} preferredCodec the preferred codec.
* @param {CodecMimeType} disabledCodec the codec that needs to be disabled.
* @returns {void}
*/
TraceablePeerConnection.prototype.setVideoCodecs = function(preferredCodec = null, disabledCodec = null) {
// If both enable and disable are set, disable settings will prevail.
const enable = disabledCodec === null;
const mimeType = disabledCodec ? disabledCodec : preferredCodec;
if (this.codecPreference && (preferredCodec || disabledCodec)) {
this.codecPreference.enable = enable;
this.codecPreference.mimeType = mimeType;
} else if (preferredCodec || disabledCodec) {
this.codecPreference = {
enable,
mediaType: MediaType.VIDEO,
mimeType
};
} else {
logger.warn(`${this} Invalid codec settings[preferred=${preferredCodec},disabled=${disabledCodec}],
            at least one value is needed`);
}
};
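// Illustrative usage sketch (editorial addition). CodecMimeType is the enum already imported
// by this module; typically only one of the two arguments is passed:
//
//     tpc.setVideoCodecs(CodecMimeType.VP9);        // prefer VP9 on the next renegotiation
//     tpc.setVideoCodecs(null, CodecMimeType.H264); // or disable H264 instead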
/**
* Tells if the given WebRTC <tt>MediaStream</tt> has been added to
* the underlying WebRTC PeerConnection.
* @param {MediaStream} mediaStream
* @returns {boolean}
*/
TraceablePeerConnection.prototype.isMediaStreamInPc = function(mediaStream) {
return this._addedStreams.indexOf(mediaStream) > -1;
};
/**
* Remove local track from this TPC.
* @param {JitsiLocalTrack} localTrack the track to be removed from this TPC.
*
 * FIXME It should probably return a boolean just like {@link removeTrackMute}
* The same applies to addTrack.
*/
TraceablePeerConnection.prototype.removeTrack = function(localTrack) {
const webRtcStream = localTrack.getOriginalStream();
this.trace(
'removeStream',
localTrack.rtcId, webRtcStream ? webRtcStream.id : undefined);
if (!this._assertTrackBelongs('removeStream', localTrack)) {
// Abort - nothing to be done here
return;
}
this.localTracks.delete(localTrack.rtcId);
this.localSSRCs.delete(localTrack.rtcId);
if (webRtcStream) {
this.peerconnection.removeStream(webRtcStream);
}
};
/**
* Returns the sender corresponding to the given media type.
 * @param {MediaType} mediaType - The media type, 'audio' or 'video', to be used for the search.
 * @returns {RTCRtpSender|undefined} - The found sender or undefined if no sender
* was found.
*/
TraceablePeerConnection.prototype.findSenderByKind = function(mediaType) {
return this.peerconnection.getSenders().find(s => s.track && s.track.kind === mediaType);
};
/**
* Returns the receiver corresponding to the given MediaStreamTrack.
*
 * @param {MediaStreamTrack} track - The media stream track used for the search.
* @returns {RTCRtpReceiver|undefined} - The found receiver or undefined if no receiver
* was found.
*/
TraceablePeerConnection.prototype.findReceiverForTrack = function(track) {
return this.peerconnection.getReceivers().find(r => r.track === track);
};
/**
* Returns the sender corresponding to the given MediaStreamTrack.
*
 * @param {MediaStreamTrack} track - The media stream track used for the search.
* @returns {RTCRtpSender|undefined} - The found sender or undefined if no sender
* was found.
*/
TraceablePeerConnection.prototype.findSenderForTrack = function(track) {
return this.peerconnection.getSenders().find(s => s.track === track);
};
/**
 * Replaces <tt>oldTrack</tt> with <tt>newTrack</tt> on the peer connection.
* Either <tt>oldTrack</tt> or <tt>newTrack</tt> can be null; replacing a valid
* <tt>oldTrack</tt> with a null <tt>newTrack</tt> effectively just removes
* <tt>oldTrack</tt>
*
* @param {JitsiLocalTrack|null} oldTrack - The current track in use to be replaced on the pc.
* @param {JitsiLocalTrack|null} newTrack - The new track to be used.
*
* @returns {Promise<boolean>} - If the promise resolves with true, renegotiation will be needed.
* Otherwise no renegotiation is needed.
*/
TraceablePeerConnection.prototype.replaceTrack = function(oldTrack, newTrack) {
if (!(oldTrack || newTrack)) {
logger.info(`${this} replaceTrack called with no new track and no old track`);
return Promise.resolve();
}
// If a track is being added to the peerconnection for the first time, we want the source signaling to be sent to
// Jicofo before the mute state is sent over presence. Therefore, trigger a renegotiation in this case. If we
// rely on "negotiationneeded" fired by the browser to signal new ssrcs, the mute state in presence will be sent
// before the source signaling which is undesirable.
const negotiationNeeded = Boolean(!oldTrack || !this.localTracks.has(oldTrack?.rtcId));
if (this._usesUnifiedPlan) {
logger.debug(`${this} TPC.replaceTrack using unified plan`);
const mediaType = newTrack?.getType() ?? oldTrack?.getType();
const stream = newTrack?.getOriginalStream();
const promise = newTrack && !stream
// Ignore cases when the track is replaced while the device is in a muted state.
// The track will be replaced again on the peerconnection when the user unmutes.
? Promise.resolve()
: this.tpcUtils.replaceTrack(oldTrack, newTrack);
const transceiver = this.tpcUtils.findTransceiver(mediaType, oldTrack);
return promise
.then(() => {
oldTrack && this.localTracks.delete(oldTrack.rtcId);
newTrack && this.localTracks.set(newTrack.rtcId, newTrack);
if (transceiver) {
// Set the transceiver direction.
transceiver.direction = newTrack ? MediaDirection.SENDRECV : MediaDirection.RECVONLY;
}
// Avoid configuring the encodings on Chromium/Safari until simulcast is configured
// for the newly added track using SDP munging which happens during the renegotiation.
const configureEncodingsPromise = browser.usesSdpMungingForSimulcast() || !newTrack
? Promise.resolve()
: this.tpcUtils.setEncodings(newTrack);
            // Renegotiate only in the case of P2P. We rely on 'negotiationneeded' to be fired for JVB.
return configureEncodingsPromise.then(() => this.isP2P || negotiationNeeded);
});
}
logger.debug(`${this} TPC.replaceTrack using plan B`);
let promiseChain = Promise.resolve();
if (oldTrack) {
this.removeTrack(oldTrack);
}
if (newTrack) {
promiseChain = this.addTrack(newTrack);
}
return promiseChain.then(() => true);
};
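// Illustrative usage sketch (editorial addition). `oldCameraTrack` and `newCameraTrack` are
// placeholder JitsiLocalTracks, e.g. when switching capture devices:
//
//     tpc.replaceTrack(oldCameraTrack, newCameraTrack)
//         .then(renegotiationNeeded => {
//             if (renegotiationNeeded) {
//                 // the caller is expected to trigger an offer/answer cycle here
//             }
//         });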
/**
* Removes local track as part of the mute operation.
 * @param {JitsiLocalTrack} localTrack the local track to be removed as part of
* the mute operation.
* @return {Promise<boolean>} Promise that resolves to true if the underlying PeerConnection's
* state has changed and renegotiation is required, false if no renegotiation is needed or
* Promise is rejected when something goes wrong.
*/
TraceablePeerConnection.prototype.removeTrackMute = function(localTrack) {
const webRtcStream = localTrack.getOriginalStream();
this.trace(
'removeStreamMute',
localTrack.rtcId, webRtcStream ? webRtcStream.id : null);
if (!this._assertTrackBelongs('removeStreamMute', localTrack)) {
// Abort - nothing to be done here
return Promise.reject('Track not found in the peerconnection');
}
if (this._usesUnifiedPlan) {
return this.tpcUtils.replaceTrack(localTrack, null);
}
if (webRtcStream) {
logger.info(`${this} Removing track=${localTrack} as mute`);
this._removeStream(webRtcStream);
return Promise.resolve(true);
}
logger.error(`${this} removeStreamMute - no WebRTC stream for track=${localTrack}`);
return Promise.reject('Stream not found');
};
TraceablePeerConnection.prototype.createDataChannel = function(label, opts) {
this.trace('createDataChannel', label, opts);
return this.peerconnection.createDataChannel(label, opts);
};
/**
* Ensures that the simulcast ssrc-group appears after any other ssrc-groups
* in the SDP so that simulcast is properly activated.
*
* @param {Object} localSdp the WebRTC session description instance for
* the local description.
* @private
*/
TraceablePeerConnection.prototype._ensureSimulcastGroupIsLast = function(
localSdp) {
let sdpStr = localSdp.sdp;
const videoStartIndex = sdpStr.indexOf('m=video');
const simStartIndex = sdpStr.indexOf('a=ssrc-group:SIM', videoStartIndex);
let otherStartIndex = sdpStr.lastIndexOf('a=ssrc-group');
if (simStartIndex === -1
|| otherStartIndex === -1
|| otherStartIndex === simStartIndex) {
return localSdp;
}
const simEndIndex = sdpStr.indexOf('\r\n', simStartIndex);
const simStr = sdpStr.substring(simStartIndex, simEndIndex + 2);
sdpStr = sdpStr.replace(simStr, '');
otherStartIndex = sdpStr.lastIndexOf('a=ssrc-group');
const otherEndIndex = sdpStr.indexOf('\r\n', otherStartIndex);
const sdpHead = sdpStr.slice(0, otherEndIndex);
const simStrTrimmed = simStr.trim();
const sdpTail = sdpStr.slice(otherEndIndex);
sdpStr = `${sdpHead}\r\n${simStrTrimmed}${sdpTail}`;
return new RTCSessionDescription({
type: localSdp.type,
sdp: sdpStr
});
};
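// Illustrative effect sketch (editorial addition; SSRC numbers are placeholders). If the
// video m-line contains
//
//     a=ssrc-group:SIM 111 222 333
//     a=ssrc-group:FID 111 112
//
// the helper above moves the SIM group after the other ssrc-group lines:
//
//     a=ssrc-group:FID 111 112
//     a=ssrc-group:SIM 111 222 333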
/**
* Will adjust audio and video media direction in the given SDP object to
* reflect the current status of the {@link audioTransferActive} and
* {@link videoTransferActive} flags.
* @param {RTCSessionDescription} localDescription the WebRTC session description instance for
* the local description.
* @private
*/
TraceablePeerConnection.prototype._adjustLocalMediaDirection = function(localDescription) {
const transformer = new SdpTransformWrap(localDescription.sdp);
let modifiedDirection = false;
const audioMedia = transformer.selectMedia(MediaType.AUDIO);
if (audioMedia) {
const desiredAudioDirection = this.getDesiredMediaDirection(MediaType.AUDIO);
if (audioMedia.direction !== desiredAudioDirection) {
audioMedia.direction = desiredAudioDirection;
logger.info(`${this} Adjusted local audio direction to ${desiredAudioDirection}`);
modifiedDirection = true;
}
} else {
logger.warn(`${this} No "audio" media found in the local description`);
}
const videoMedia = transformer.selectMedia(MediaType.VIDEO);
if (videoMedia) {
const desiredVideoDirection = this.getDesiredMediaDirection(MediaType.VIDEO);
if (videoMedia.direction !== desiredVideoDirection) {
videoMedia.direction = desiredVideoDirection;
logger.info(`${this} Adjusted local video direction to ${desiredVideoDirection}`);
modifiedDirection = true;
}
} else {
logger.warn(`${this} No "video" media found in the local description`);
}
if (modifiedDirection) {
return new RTCSessionDescription({
type: localDescription.type,
sdp: transformer.toRawSDP()
});
}
return localDescription;
};
/**
* Adjusts the media direction on the remote description based on availability of local and remote sources in a p2p
* media connection.
*
* @param {RTCSessionDescription} remoteDescription the WebRTC session description instance for the remote description.
* @returns the transformed remoteDescription.
* @private
*/
TraceablePeerConnection.prototype._adjustRemoteMediaDirection = function(remoteDescription) {
const transformer = new SdpTransformWrap(remoteDescription.sdp);
[ MediaType.AUDIO, MediaType.VIDEO ].forEach(mediaType => {
const media = transformer.selectMedia(mediaType);
const hasLocalSource = this.hasAnyTracksOfType(mediaType);
const hasRemoteSource = this.getRemoteTracks(null, mediaType).length > 0;
media.direction = hasLocalSource && hasRemoteSource
? MediaDirection.SENDRECV
: hasLocalSource
? MediaDirection.RECVONLY
: hasRemoteSource ? MediaDirection.SENDONLY : MediaDirection.INACTIVE;
});
return new RTCSessionDescription({
type: remoteDescription.type,
sdp: transformer.toRawSDP()
});
};
/**
* Munges the stereo flag as well as the opusMaxAverageBitrate in the SDP, based
* on values set through config.js, if present.
*
* @param {RTCSessionDescription} description that needs to be munged.
* @returns {RTCSessionDescription} the munged description.
*/
TraceablePeerConnection.prototype._mungeOpus = function(description) {
const { audioQuality } = this.options;
if (!audioQuality?.stereo && !audioQuality?.opusMaxAverageBitrate) {
return description;
}
const parsedSdp = transform.parse(description.sdp);
const mLines = parsedSdp.media;
for (const mLine of mLines) {
if (mLine.type === 'audio') {
const { payload } = mLine.rtp.find(protocol => protocol.codec === CodecMimeType.OPUS);
if (!payload) {
// eslint-disable-next-line no-continue
continue;
}
let fmtpOpus = mLine.fmtp.find(protocol => protocol.payload === payload);
if (!fmtpOpus) {
fmtpOpus = {
payload,
config: ''
};
}
const fmtpConfig = transform.parseParams(fmtpOpus.config);
let sdpChanged = false;
if (audioQuality?.stereo) {
fmtpConfig.stereo = 1;
sdpChanged = true;
}
if (audioQuality?.opusMaxAverageBitrate) {
fmtpConfig.maxaveragebitrate = audioQuality.opusMaxAverageBitrate;
sdpChanged = true;
}
if (!sdpChanged) {
// eslint-disable-next-line no-continue
continue;
}
let mungedConfig = '';
for (const key of Object.keys(fmtpConfig)) {
mungedConfig += `${key}=${fmtpConfig[key]}; `;
}
fmtpOpus.config = mungedConfig.trim();
}
}
return new RTCSessionDescription({
type: description.type,
sdp: transform.write(parsedSdp)
});
};
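// Illustrative effect sketch (editorial addition; the payload type and pre-existing fmtp
// parameters are placeholders). With options.audioQuality = { stereo: true,
// opusMaxAverageBitrate: 510000 } the Opus fmtp line of each audio m-line is rewritten as:
//
//     a=fmtp:111 minptime=10; useinbandfec=1; stereo=1; maxaveragebitrate=510000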
/**
* Configures the stream encodings depending on the video type and the bitrates configured.
*
* @returns {Promise} promise that will be resolved when the operation is successful and rejected otherwise.
*/
TraceablePeerConnection.prototype.configureSenderVideoEncodings = function() {
return this.setSenderVideoConstraints(this._senderVideoMaxHeight);
};
TraceablePeerConnection.prototype.setLocalDescription = function(description) {
let localSdp = description;
this.trace('setLocalDescription::preTransform', dumpSDP(localSdp));
// Munge stereo flag and opusMaxAverageBitrate based on config.js
localSdp = this._mungeOpus(localSdp);
if (!this._usesUnifiedPlan) {
localSdp = this._adjustLocalMediaDirection(localSdp);
localSdp = this._ensureSimulcastGroupIsLast(localSdp);
} else if (!this.isP2P) {
// if we're using unified plan, transform to it first.
localSdp = this.interop.toUnifiedPlan(localSdp);
this.trace(
'setLocalDescription::postTransform (Unified Plan)',
dumpSDP(localSdp));
}
// Munge the order of the codecs based on the preferences set through config.js if we are using SDP munging.
if (!this._usesTransceiverCodecPreferences) {
localSdp = this._mungeCodecOrder(localSdp);
}
return new Promise((resolve, reject) => {
this.peerconnection.setLocalDescription(localSdp)
.then(() => {
this.trace('setLocalDescriptionOnSuccess');
const localUfrag = SDPUtil.getUfrag(localSdp.sdp);
if (localUfrag !== this.localUfrag) {
this.localUfrag = localUfrag;
this.eventEmitter.emit(
RTCEvents.LOCAL_UFRAG_CHANGED, this, localUfrag);
}
resolve();
}, err => {
this.trace('setLocalDescriptionOnFailure', err);
this.eventEmitter.emit(
RTCEvents.SET_LOCAL_DESCRIPTION_FAILED,
err, this);
reject(err);
});
});
};
/**
* Enables/disables audio media transmission on this peer connection. When
* disabled the SDP audio media direction in the local SDP will be adjusted to
 * 'inactive', which means that no data will be sent or accepted, but
* the connection should be kept alive.
* @param {boolean} active <tt>true</tt> to enable audio media transmission or
* <tt>false</tt> to disable. If the value is not a boolean the call will have
* no effect.
* @return {boolean} <tt>true</tt> if the value has changed and sRD/sLD cycle
* needs to be executed in order for the changes to take effect or
* <tt>false</tt> if the given value was the same as the previous one.
* @public
*/
TraceablePeerConnection.prototype.setAudioTransferActive = function(active) {
logger.debug(`${this} audio transfer active: ${active}`);
const changed = this.audioTransferActive !== active;
this.audioTransferActive = active;
if (this._usesUnifiedPlan) {
this.tpcUtils.setAudioTransferActive(active);
// false means no renegotiation up the chain which is not needed in the Unified mode
return false;
}
return changed;
};
TraceablePeerConnection.prototype.setRemoteDescription = function(description) {
this.trace('setRemoteDescription::preTransform', dumpSDP(description));
/* eslint-disable no-param-reassign */
// Munge stereo flag and opusMaxAverageBitrate based on config.js
description = this._mungeOpus(description);
/* eslint-enable no-param-reassign */
if (!this._usesUnifiedPlan) {
        // TODO the focus should squeeze or explode the remote simulcast
if (this.isSimulcastOn()) {
// eslint-disable-next-line no-param-reassign
description = this.simulcast.mungeRemoteDescription(description, true /* add x-google-conference flag */);
this.trace(
'setRemoteDescription::postTransform (simulcast)',
dumpSDP(description));
}
// eslint-disable-next-line no-param-reassign
description = normalizePlanB(description);
} else if (!this.isP2P) {
const currentDescription = this.peerconnection.remoteDescription;
// eslint-disable-next-line no-param-reassign
description = this.interop.toUnifiedPlan(description, currentDescription);
this.trace(
'setRemoteDescription::postTransform (Unified)',
dumpSDP(description));
if (this.isSimulcastOn()) {
// eslint-disable-next-line no-param-reassign
description = this.simulcast.mungeRemoteDescription(description);
// eslint-disable-next-line no-param-reassign
description = this.tpcUtils.insertUnifiedPlanSimulcastReceive(description);
this.trace(
'setRemoteDescription::postTransform (sim receive)',
dumpSDP(description));
}
}
// Munge the order of the codecs based on the preferences set through config.js.
// eslint-disable-next-line no-param-reassign
description = this._mungeCodecOrder(description);
if (this._usesUnifiedPlan) {
// eslint-disable-next-line no-param-reassign
description = this.tpcUtils.ensureCorrectOrderOfSsrcs(description);
}
return new Promise((resolve, reject) => {
this.peerconnection.setRemoteDescription(description)
.then(() => {
this.trace('setRemoteDescriptionOnSuccess');
const remoteUfrag = SDPUtil.getUfrag(description.sdp);
if (remoteUfrag !== this.remoteUfrag) {
this.remoteUfrag = remoteUfrag;
this.eventEmitter.emit(
RTCEvents.REMOTE_UFRAG_CHANGED, this, remoteUfrag);
}
resolve();
}, err => {
this.trace('setRemoteDescriptionOnFailure', err);
this.eventEmitter.emit(
RTCEvents.SET_REMOTE_DESCRIPTION_FAILED,
err,
this);
reject(err);
});
});
};
/**
* Changes the resolution of the video stream that is sent to the peer based on the resolution requested by the peer
 * and user preference, sets the degradation preference on the sender based on the video type, and configures the
 * maximum bitrates on the send stream.
*
* @param {number} frameHeight - The max frame height to be imposed on the outgoing video stream.
* @returns {Promise} promise that will be resolved when the operation is successful and rejected otherwise.
*/
TraceablePeerConnection.prototype.setSenderVideoConstraints = function(frameHeight) {
if (frameHeight < 0) {
throw new Error(`Invalid frameHeight: ${frameHeight}`);
}
// XXX: This is not yet supported on mobile.
if (browser.isReactNative()) {
return Promise.resolve();
}
this._senderVideoMaxHeight = frameHeight;
const localVideoTrack = this.getLocalVideoTrack();
if (!localVideoTrack || localVideoTrack.isMuted()) {
return Promise.resolve();
}
const videoSender = this.findSenderByKind(MediaType.VIDEO);
if (!videoSender) {
return Promise.resolve();
}
const parameters = videoSender.getParameters();
if (!parameters?.encodings?.length) {
return Promise.resolve();
}
// Set the degradation preference.
const preference = this.isSharingLowFpsScreen()
? DEGRADATION_PREFERENCE_DESKTOP // Prefer resolution for low fps share.
: DEGRADATION_PREFERENCE_CAMERA; // Prefer frame-rate for high fps share and camera.
parameters.degradationPreference = preference;
logger.info(`${this} Setting degradation preference [preference=${preference},track=${localVideoTrack}`);
// Calculate the encodings active state based on the resolution requested by the bridge.
this.encodingsEnabledState = this.tpcUtils.calculateEncodingsActiveState(localVideoTrack, frameHeight);
const maxBitrates = this.tpcUtils.calculateEncodingsBitrates(localVideoTrack);
const videoType = localVideoTrack.getVideoType();
if (this.isSimulcastOn()) {
for (const encoding in parameters.encodings) {
if (parameters.encodings.hasOwnProperty(encoding)) {
parameters.encodings[encoding].active = this.encodingsEnabledState[encoding];
// Firefox doesn't follow the spec and lets application specify the degradation preference on the
// encodings.
browser.isFirefox() && (parameters.encodings[encoding].degradationPreference = preference);
// Max bitrates are configured on the encodings only for VP8.
if (this.getConfiguredVideoCodec() === CodecMimeType.VP8
&& (this.options?.videoQuality?.maxBitratesVideo
|| this.isSharingLowFpsScreen()
|| this._usesUnifiedPlan)) {
parameters.encodings[encoding].maxBitrate = maxBitrates[encoding];
}
}
}
this.tpcUtils.updateEncodingsResolution(parameters);
    // For p2p and cases where simulcast is explicitly disabled.
} else if (frameHeight > 0) {
let scaleFactor = HD_SCALE_FACTOR;
// Do not scale down encodings for desktop tracks for non-simulcast case.
if (videoType === VideoType.CAMERA && localVideoTrack.resolution > frameHeight) {
scaleFactor = Math.floor(localVideoTrack.resolution / frameHeight);
}
parameters.encodings[0].active = true;
parameters.encodings[0].scaleResolutionDownBy = scaleFactor;
// Firefox doesn't follow the spec and lets application specify the degradation preference on the encodings.
browser.isFirefox() && (parameters.encodings[0].degradationPreference = preference);
// Configure the bitrate.
if (this.getConfiguredVideoCodec() === CodecMimeType.VP8 && this.options?.videoQuality?.maxBitratesVideo) {
let bitrate = this.getTargetVideoBitrates()?.high;
if (videoType === VideoType.CAMERA) {
bitrate = this.tpcUtils.localStreamEncodingsConfig
.find(layer => layer.scaleResolutionDownBy === scaleFactor)?.maxBitrate ?? bitrate;
}
parameters.encodings[0].maxBitrate = bitrate;
}
} else {
parameters.encodings[0].active = false;
}
logger.info(`${this} setting max height=${frameHeight},encodings=${JSON.stringify(parameters.encodings)}`);
return videoSender.setParameters(parameters).then(() => {
localVideoTrack.maxEnabledResolution = frameHeight;
this.eventEmitter.emit(RTCEvents.LOCAL_TRACK_MAX_ENABLED_RESOLUTION_CHANGED, localVideoTrack);
});
};
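// Illustrative usage sketch (not part of the original file): how a caller might cap the
// outgoing resolution. `tpc` is an assumed TraceablePeerConnection instance and 720 is an
// arbitrary example height; the method resolves once the sender parameters are applied.
//
//     tpc.setSenderVideoConstraints(720)
//         .then(() => console.log('sender video capped at 720p'))
//         .catch(error => console.error('failed to apply sender constraints', error));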
/**
* Enables/disables video media transmission on this peer connection. When
* disabled the SDP video media direction in the local SDP will be adjusted to
* 'inactive' which means that no data will be sent nor accepted, but
* the connection should be kept alive.
* @param {boolean} active <tt>true</tt> to enable video media transmission or
* <tt>false</tt> to disable. If the value is not a boolean the call will have
* no effect.
* @return {boolean} <tt>true</tt> if the value has changed and sRD/sLD cycle
* needs to be executed in order for the changes to take effect or
* <tt>false</tt> if the given value was the same as the previous one.
* @public
*/
TraceablePeerConnection.prototype.setVideoTransferActive = function(active) {
logger.debug(`${this} video transfer active: ${active}`);
const changed = this.videoTransferActive !== active;
this.videoTransferActive = active;
if (this._usesUnifiedPlan) {
this.tpcUtils.setVideoTransferActive(active);
// false means no renegotiation up the chain which is not needed in the Unified mode
return false;
}
return changed;
};
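// Illustrative usage sketch (not part of the original file): the return value tells the caller
// whether an sRD/sLD cycle is still required. `tpc` and `renegotiate` are assumed names; under
// Unified Plan the method always returns false because no renegotiation is needed.
//
//     if (tpc.setVideoTransferActive(false)) {
//         renegotiate(); // only the plan-b path needs a fresh offer/answer round
//     }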
/**
* Sends DTMF tones if possible.
*
* @param {string} tones - The DTMF tones string as defined by {@code RTCDTMFSender.insertDTMF}, 'tones' argument.
* @param {number} duration - The amount of time in milliseconds that each DTMF should last. It's 200ms by default.
 * @param {number} interToneGap - The length of time in milliseconds to wait between tones. It's 200ms by default.
*
* @returns {void}
*/
TraceablePeerConnection.prototype.sendTones = function(tones, duration = 200, interToneGap = 200) {
if (!this._dtmfSender) {
if (this.peerconnection.getSenders) {
const rtpSender = this.peerconnection.getSenders().find(s => s.dtmf);
this._dtmfSender = rtpSender && rtpSender.dtmf;
this._dtmfSender && logger.info(`${this} initialized DTMFSender using getSenders`);
}
if (!this._dtmfSender) {
const localAudioTrack = Array.from(this.localTracks.values()).find(t => t.isAudioTrack());
if (this.peerconnection.createDTMFSender && localAudioTrack) {
this._dtmfSender = this.peerconnection.createDTMFSender(localAudioTrack.getTrack());
}
this._dtmfSender && logger.info(`${this} initialized DTMFSender using deprecated createDTMFSender`);
}
if (this._dtmfSender) {
this._dtmfSender.ontonechange = this._onToneChange.bind(this);
}
}
if (this._dtmfSender) {
if (this._dtmfSender.toneBuffer) {
this._dtmfTonesQueue.push({
tones,
duration,
interToneGap
});
return;
}
this._dtmfSender.insertDTMF(tones, duration, interToneGap);
} else {
logger.warn(`${this} sendTones - failed to select DTMFSender`);
}
};
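// Illustrative usage sketch (not part of the original file): sending and queueing DTMF.
// `tpc` is an assumed TraceablePeerConnection instance; the tone characters follow the
// RTCDTMFSender alphabet (0-9, A-D, #, * and ',' for a pause).
//
//     tpc.sendTones('123#');          // plays immediately when the tone buffer is empty
//     tpc.sendTones('456', 100, 70);  // queued and played after the first string finishes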
/**
 * Callback invoked by {@code this._dtmfSender} when it has finished playing
* a single tone.
*
* @param {Object} event - The tonechange event which indicates what characters
* are left to be played for the current tone.
* @private
* @returns {void}
*/
TraceablePeerConnection.prototype._onToneChange = function(event) {
// An empty event.tone indicates the current tones have finished playing.
// Automatically start playing any queued tones on finish.
if (this._dtmfSender && event.tone === '' && this._dtmfTonesQueue.length) {
const { tones, duration, interToneGap } = this._dtmfTonesQueue.shift();
this._dtmfSender.insertDTMF(tones, duration, interToneGap);
}
};
/**
* Makes the underlying TraceablePeerConnection generate new SSRC for
* the recvonly video stream.
*/
TraceablePeerConnection.prototype.generateRecvonlySsrc = function() {
const newSSRC = SDPUtil.generateSsrc();
logger.info(`${this} generated new recvonly SSRC=${newSSRC}`);
this.sdpConsistency.setPrimarySsrc(newSSRC);
};
/**
* Makes the underlying TraceablePeerConnection forget the current primary video
* SSRC.
*/
TraceablePeerConnection.prototype.clearRecvonlySsrc = function() {
logger.info(`${this} Clearing primary video SSRC!`);
this.sdpConsistency.clearVideoSsrcCache();
};
/**
* Closes underlying WebRTC PeerConnection instance and removes all remote
* tracks by emitting {@link RTCEvents.REMOTE_TRACK_REMOVED} for each one of
* them.
*/
TraceablePeerConnection.prototype.close = function() {
this.trace('stop');
// Off SignalingEvents
this.signalingLayer.off(SignalingEvents.PEER_MUTED_CHANGED, this._peerMutedChanged);
this.signalingLayer.off(SignalingEvents.PEER_VIDEO_TYPE_CHANGED, this._peerVideoTypeChanged);
this._usesUnifiedPlan && this.peerconnection.removeEventListener('track', this.onTrack);
for (const peerTracks of this.remoteTracks.values()) {
for (const remoteTrack of peerTracks.values()) {
this._removeRemoteTrack(remoteTrack);
}
}
this.remoteTracks.clear();
this._addedStreams = [];
this._dtmfSender = null;
this._dtmfTonesQueue = [];
if (!this.rtc._removePeerConnection(this)) {
logger.error(`${this} RTC._removePeerConnection returned false`);
}
if (this.statsinterval !== null) {
window.clearInterval(this.statsinterval);
this.statsinterval = null;
}
logger.info(`${this} Closing peerconnection`);
this.peerconnection.close();
};
TraceablePeerConnection.prototype.createAnswer = function(constraints) {
return this._createOfferOrAnswer(false /* answer */, constraints);
};
TraceablePeerConnection.prototype.createOffer = function(constraints) {
return this._createOfferOrAnswer(true /* offer */, constraints);
};
TraceablePeerConnection.prototype._createOfferOrAnswer = function(
isOffer,
constraints) {
const logName = isOffer ? 'Offer' : 'Answer';
this.trace(`create${logName}`, JSON.stringify(constraints, null, ' '));
const handleSuccess = (resultSdp, resolveFn, rejectFn) => {
try {
this.trace(
`create${logName}OnSuccess::preTransform`, dumpSDP(resultSdp));
if (!this._usesUnifiedPlan) {
// If there are no local video tracks, then a "recvonly"
// SSRC needs to be generated
if (!this.hasAnyTracksOfType(MediaType.VIDEO)
&& !this.sdpConsistency.hasPrimarySsrcCached()) {
this.generateRecvonlySsrc();
}
// eslint-disable-next-line no-param-reassign
resultSdp = new RTCSessionDescription({
type: resultSdp.type,
sdp: this.sdpConsistency.makeVideoPrimarySsrcsConsistent(
resultSdp.sdp)
});
this.trace(
`create${logName}OnSuccess::postTransform `
+ '(make primary audio/video ssrcs consistent)',
dumpSDP(resultSdp));
}
const localVideoTrack = this.getLocalVideoTrack();
// Configure simulcast for camera tracks and for desktop tracks that need simulcast.
if (this.isSimulcastOn() && browser.usesSdpMungingForSimulcast()
&& (localVideoTrack?.getVideoType() === VideoType.CAMERA
|| this._usesUnifiedPlan
|| !this.isSharingLowFpsScreen())) {
// eslint-disable-next-line no-param-reassign
resultSdp = this.simulcast.mungeLocalDescription(resultSdp);
this.trace(
`create${logName}`
+ 'OnSuccess::postTransform (simulcast)',
dumpSDP(resultSdp));
}
if (!this.options.disableRtx && browser.usesSdpMungingForSimulcast()) {
// eslint-disable-next-line no-param-reassign
resultSdp = new RTCSessionDescription({
type: resultSdp.type,
sdp: this.rtxModifier.modifyRtxSsrcs(resultSdp.sdp)
});
this.trace(
`create${logName}`
+ 'OnSuccess::postTransform (rtx modifier)',
dumpSDP(resultSdp));
}
const ssrcMap = this._extractSSRCMap(resultSdp);
this._processLocalSSRCsMap(ssrcMap);
resolveFn(resultSdp);
} catch (e) {
this.trace(`create${logName}OnError`, e);
this.trace(`create${logName}OnError`, dumpSDP(resultSdp));
logger.error(`${this} create${logName}OnError`, e, dumpSDP(resultSdp));
rejectFn(e);
}
};
const handleFailure = (err, rejectFn) => {
this.trace(`create${logName}OnFailure`, err);
const eventType
= isOffer
? RTCEvents.CREATE_OFFER_FAILED
: RTCEvents.CREATE_ANSWER_FAILED;
this.eventEmitter.emit(eventType, err, this);
rejectFn(err);
};
// Set the codec preference before creating an offer or answer so that the generated SDP will have
// the correct preference order.
if (this._usesTransceiverCodecPreferences) {
const transceiver = this.peerconnection.getTransceivers()
.find(t => t.receiver && t.receiver?.track?.kind === MediaType.VIDEO);
if (transceiver) {
let capabilities = RTCRtpReceiver.getCapabilities(MediaType.VIDEO)?.codecs;
const mimeType = this.codecPreference?.mimeType;
const enable = this.codecPreference?.enable;
if (capabilities && mimeType && enable) {
// Move the desired codec (all variations of it as well) to the beginning of the list.
/* eslint-disable-next-line arrow-body-style */
capabilities.sort(caps => {
return caps.mimeType.toLowerCase() === `${MediaType.VIDEO}/${mimeType}` ? -1 : 1;
});
} else if (capabilities && mimeType) {
capabilities = capabilities
.filter(caps => caps.mimeType.toLowerCase() !== `${MediaType.VIDEO}/${mimeType}`);
}
// Disable ulpfec on Google Chrome and derivatives because
// https://bugs.chromium.org/p/chromium/issues/detail?id=1276427
if (browser.isChromiumBased()) {
capabilities = capabilities
.filter(caps => caps.mimeType.toLowerCase() !== `${MediaType.VIDEO}/${CodecMimeType.ULPFEC}`);
}
try {
transceiver.setCodecPreferences(capabilities);
} catch (err) {
logger.warn(`${this} Setting codec[preference=${mimeType},enable=${enable}] failed`, err);
}
}
}
return new Promise((resolve, reject) => {
let oaPromise;
if (isOffer) {
oaPromise = this.peerconnection.createOffer(constraints);
} else {
oaPromise = this.peerconnection.createAnswer(constraints);
}
oaPromise
.then(
sdp => handleSuccess(sdp, resolve, reject),
error => handleFailure(error, reject));
});
};
/**
* Extract primary SSRC from given {@link TrackSSRCInfo} object.
* @param {TrackSSRCInfo} ssrcObj
* @return {number|null} the primary SSRC or <tt>null</tt>
*/
TraceablePeerConnection.prototype._extractPrimarySSRC = function(ssrcObj) {
if (ssrcObj && ssrcObj.groups && ssrcObj.groups.length) {
return ssrcObj.groups[0].ssrcs[0];
} else if (ssrcObj && ssrcObj.ssrcs && ssrcObj.ssrcs.length) {
return ssrcObj.ssrcs[0];
}
return null;
};
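// Illustrative sketch (not part of the original file): sample inputs and the values the
// helper above would return (SSRC numbers are made up).
//
//     tpc._extractPrimarySSRC({ ssrcs: [ 11, 21 ], groups: [ { semantics: 'FID', ssrcs: [ 11, 21 ] } ] }); // -> 11
//     tpc._extractPrimarySSRC({ ssrcs: [ 42 ], groups: [] });                                              // -> 42
//     tpc._extractPrimarySSRC(null);                                                                       // -> null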
/**
* Goes over the SSRC map extracted from the latest local description and tries
* to match them with the local tracks (by MSID). Will update the values
* currently stored in the {@link TraceablePeerConnection.localSSRCs} map.
* @param {Map<string,TrackSSRCInfo>} ssrcMap
* @private
*/
TraceablePeerConnection.prototype._processLocalSSRCsMap = function(ssrcMap) {
for (const track of this.localTracks.values()) {
const sourceIdentifier = this._usesUnifiedPlan ? track.getType() : track.storedMSID;
if (ssrcMap.has(sourceIdentifier)) {
const newSSRC = ssrcMap.get(sourceIdentifier);
if (!newSSRC) {
logger.error(`${this} No SSRC found for stream=${sourceIdentifier}`);
return;
}
const oldSSRC = this.localSSRCs.get(track.rtcId);
const newSSRCNum = this._extractPrimarySSRC(newSSRC);
const oldSSRCNum = this._extractPrimarySSRC(oldSSRC);
// eslint-disable-next-line no-negated-condition
if (newSSRCNum !== oldSSRCNum) {
oldSSRCNum && logger.error(`${this} Overwriting SSRC for track=${track} with ssrc=${newSSRC}`);
this.localSSRCs.set(track.rtcId, newSSRC);
this.eventEmitter.emit(RTCEvents.LOCAL_TRACK_SSRC_UPDATED, track, newSSRCNum);
}
} else if (!track.isVideoTrack() && !track.isMuted()) {
// It is normal to find no SSRCs for a muted video track in
// the local SDP as the recv-only SSRC is no longer munged in.
// So log the warning only if it's not a muted video track.
logger.warn(`${this} No SSRCs found in the local SDP for track=${track}, stream=${sourceIdentifier}`);
}
}
};
TraceablePeerConnection.prototype.addIceCandidate = function(candidate) {
this.trace('addIceCandidate', JSON.stringify({
candidate: candidate.candidate,
sdpMid: candidate.sdpMid,
sdpMLineIndex: candidate.sdpMLineIndex,
usernameFragment: candidate.usernameFragment
}, null, ' '));
return this.peerconnection.addIceCandidate(candidate);
};
/**
* Returns the number of simulcast streams that are currently enabled on the peerconnection.
*
* @returns {number} The number of simulcast streams currently enabled or 1 when simulcast is disabled.
*/
TraceablePeerConnection.prototype.getActiveSimulcastStreams = function() {
let activeStreams = 1;
if (this.isSimulcastOn() && this.encodingsEnabledState) {
activeStreams = this.encodingsEnabledState.filter(stream => Boolean(stream))?.length;
} else if (this.isSimulcastOn()) {
activeStreams = SIM_LAYER_RIDS.length;
}
return activeStreams;
};
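// Illustrative sketch (not part of the original file): with an encodingsEnabledState of
// [ true, false, true ] the method above returns 2; with simulcast enabled but no computed
// state it falls back to SIM_LAYER_RIDS.length, and with simulcast off it always returns 1.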
/**
* Obtains call-related stats from the peer connection.
*
* @returns {Promise<Object>} Promise which resolves with data providing statistics about
* the peerconnection.
*/
TraceablePeerConnection.prototype.getStats = function() {
return this.peerconnection.getStats();
};
/**
* Generates and stores new SSRC info object for given local track.
* The method should be called only for a video track being added to this TPC
* in the muted state (given that the current browser uses this strategy).
* @param {JitsiLocalTrack} track
* @return {TPCSSRCInfo}
*/
TraceablePeerConnection.prototype.generateNewStreamSSRCInfo = function(track) {
const rtcId = track.rtcId;
let ssrcInfo = this._getSSRC(rtcId);
if (ssrcInfo) {
logger.error(`${this} Overwriting local SSRCs for track id=${rtcId}`);
}
// Configure simulcast for camera tracks and desktop tracks that need simulcast.
if (this.isSimulcastOn()
&& (track.getVideoType() === VideoType.CAMERA || !this.isSharingLowFpsScreen())) {
ssrcInfo = {
ssrcs: [],
groups: []
};
for (let i = 0; i < SIM_LAYER_RIDS.length; i++) {
ssrcInfo.ssrcs.push(SDPUtil.generateSsrc());
}
ssrcInfo.groups.push({
ssrcs: ssrcInfo.ssrcs.slice(),
semantics: 'SIM'
});
} else {
ssrcInfo = {
ssrcs: [ SDPUtil.generateSsrc() ],
groups: []
};
}
if (!this.options.disableRtx) {
// Specifically use a for loop here because we'll
// be adding to the list we're iterating over, so we
// only want to iterate through the items originally
// on the list
const currNumSsrcs = ssrcInfo.ssrcs.length;
for (let i = 0; i < currNumSsrcs; ++i) {
const primarySsrc = ssrcInfo.ssrcs[i];
const rtxSsrc = SDPUtil.generateSsrc();
ssrcInfo.ssrcs.push(rtxSsrc);
ssrcInfo.groups.push({
ssrcs: [ primarySsrc, rtxSsrc ],
semantics: 'FID'
});
}
}
ssrcInfo.msid = track.storedMSID;
this.localSSRCs.set(rtcId, ssrcInfo);
return ssrcInfo;
};
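// Illustrative sketch (not part of the original file): shape of the TPCSSRCInfo produced above
// when simulcast (three layers assumed) and RTX are both enabled; all SSRC values are made up.
//
//     {
//         ssrcs: [ 11, 12, 13, 21, 22, 23 ],              // three primary + three RTX SSRCs
//         groups: [
//             { semantics: 'SIM', ssrcs: [ 11, 12, 13 ] },
//             { semantics: 'FID', ssrcs: [ 11, 21 ] },
//             { semantics: 'FID', ssrcs: [ 12, 22 ] },
//             { semantics: 'FID', ssrcs: [ 13, 23 ] }
//         ],
//         msid: 'stream-id track-id'                      // track.storedMSID
//     }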
/**
* Returns if the peer connection uses Unified plan implementation.
*
* @returns {boolean} True if the pc uses Unified plan, false otherwise.
*/
TraceablePeerConnection.prototype.usesUnifiedPlan = function() {
return this._usesUnifiedPlan;
};
/**
* Creates a text representation of this <tt>TraceablePeerConnection</tt>
* instance.
* @return {string}
*/
TraceablePeerConnection.prototype.toString = function() {
return `TPC[id=${this.id},type=${this.isP2P ? 'P2P' : 'JVB'}]`;
};
| ref(TPC) Remove an unnecessary toUnified sdp conversion.
sLD is called immediately after createOffer/createAnswer, therefore the desc provided by createOffer/createAnswer can be directly passed to sLD without the need for converting it to unified plan format. This also fixes a warning seen on the browser console that says 'The description does not look like plan-b'.
| modules/RTC/TraceablePeerConnection.js | ref(TPC) Remove an unnecessary toUnified sdp conversion. sLD is called immediately after createOffer/createAnswer, therefore the desc provided by createOffer/createAnswer can be directly passed to sLD without the need for converting it to unified plan format. This also fixes a warning seen on the browser console that says 'The description does not look like plan-b'. | <ide><path>odules/RTC/TraceablePeerConnection.js
<ide> * the local description.
<ide> * @private
<ide> */
<del>TraceablePeerConnection.prototype._ensureSimulcastGroupIsLast = function(
<del> localSdp) {
<add>TraceablePeerConnection.prototype._ensureSimulcastGroupIsLast = function(localSdp) {
<ide> let sdpStr = localSdp.sdp;
<ide>
<ide> const videoStartIndex = sdpStr.indexOf('m=video');
<ide> };
<ide>
<ide> TraceablePeerConnection.prototype.setLocalDescription = function(description) {
<del> let localSdp = description;
<del>
<del> this.trace('setLocalDescription::preTransform', dumpSDP(localSdp));
<add> let localDescription = description;
<add>
<add> this.trace('setLocalDescription::preTransform', dumpSDP(localDescription));
<ide>
<ide> // Munge stereo flag and opusMaxAverageBitrate based on config.js
<del> localSdp = this._mungeOpus(localSdp);
<add> localDescription = this._mungeOpus(localDescription);
<ide>
<ide> if (!this._usesUnifiedPlan) {
<del> localSdp = this._adjustLocalMediaDirection(localSdp);
<del> localSdp = this._ensureSimulcastGroupIsLast(localSdp);
<del> } else if (!this.isP2P) {
<del>
<del> // if we're using unified plan, transform to it first.
<del> localSdp = this.interop.toUnifiedPlan(localSdp);
<del> this.trace(
<del> 'setLocalDescription::postTransform (Unified Plan)',
<del> dumpSDP(localSdp));
<add> localDescription = this._adjustLocalMediaDirection(localDescription);
<add> localDescription = this._ensureSimulcastGroupIsLast(localDescription);
<ide> }
<ide>
<ide> // Munge the order of the codecs based on the preferences set through config.js if we are using SDP munging.
<ide> if (!this._usesTransceiverCodecPreferences) {
<del> localSdp = this._mungeCodecOrder(localSdp);
<del> }
<add> localDescription = this._mungeCodecOrder(localDescription);
<add> }
<add>
<add> this.trace('setLocalDescription::postTransform', dumpSDP(localDescription));
<ide>
<ide> return new Promise((resolve, reject) => {
<del> this.peerconnection.setLocalDescription(localSdp)
<add> this.peerconnection.setLocalDescription(localDescription)
<ide> .then(() => {
<ide> this.trace('setLocalDescriptionOnSuccess');
<del> const localUfrag = SDPUtil.getUfrag(localSdp.sdp);
<add> const localUfrag = SDPUtil.getUfrag(localDescription.sdp);
<ide>
<ide> if (localUfrag !== this.localUfrag) {
<ide> this.localUfrag = localUfrag;
<del> this.eventEmitter.emit(
<del> RTCEvents.LOCAL_UFRAG_CHANGED, this, localUfrag);
<add> this.eventEmitter.emit(RTCEvents.LOCAL_UFRAG_CHANGED, this, localUfrag);
<ide> }
<ide> resolve();
<ide> }, err => {
<ide> this.trace('setLocalDescriptionOnFailure', err);
<del> this.eventEmitter.emit(
<del> RTCEvents.SET_LOCAL_DESCRIPTION_FAILED,
<del> err, this);
<add> this.eventEmitter.emit(RTCEvents.SET_LOCAL_DESCRIPTION_FAILED, err, this);
<ide> reject(err);
<ide> });
<ide> }); |
|
Java | apache-2.0 | b367516d3b409e26fa2a280bcb15c2cfedda6762 | 0 | plus-provenance/plus,caturday/plus,caturday/plus,caturday/plus,plus-provenance/plus,caturday/plus,plus-provenance/plus,plus-provenance/plus | /* Copyright 2014 MITRE Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mitre.provenance.services;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Logger;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import org.mitre.provenance.PLUSException;
import org.mitre.provenance.dag.LineageDAG;
import org.mitre.provenance.dag.TraversalSettings;
import org.mitre.provenance.db.neo4j.Neo4JPLUSObjectFactory;
import org.mitre.provenance.db.neo4j.Neo4JStorage;
import org.mitre.provenance.plusobject.PLUSObject;
import org.mitre.provenance.plusobject.ProvenanceCollection;
import org.mitre.provenance.plusobject.json.JsonFormatException;
import org.mitre.provenance.plusobject.json.ProvenanceCollectionDeserializer;
import org.mitre.provenance.user.User;
import org.neo4j.cypher.javacompat.ExecutionResult;
import org.neo4j.graphdb.Node;
import org.neo4j.graphdb.Relationship;
import org.neo4j.graphdb.ResourceIterator;
import org.neo4j.graphdb.Transaction;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonParseException;
/**
 * DAGServices encompasses RESTful services that operate over provenance "DAGs" (directed acyclic graphs).
* @author dmallen
*/
@Path("/graph")
public class DAGServices {
protected static Logger log = Logger.getLogger(DAGServices.class.getName());
public class CollectionFormatException extends Exception {
private static final long serialVersionUID = 2819285921155590440L;
public CollectionFormatException(String msg) { super(msg); }
}
@GET
@Path("/{oid:.*}")
@Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
/**
* Gets a provenance graph centered at a particular point, in D3 JSON format.
* @param oid the OID for the starting point of the graph
* @return a D3 JSON string, or 404 if not found, or internal server error on PLUSException
*/
public Response getGraph(@Context HttpServletRequest req,
@PathParam("oid") String oid,
@DefaultValue("50") @QueryParam("n") int maxNodes,
@DefaultValue("8") @QueryParam("maxHops") int maxHops,
@DefaultValue("true") @QueryParam("includeNodes") boolean includeNodes,
@DefaultValue("true") @QueryParam("includeEdges") boolean includeEdges,
@DefaultValue("true") @QueryParam("includeNPEs") boolean includeNPEs,
@DefaultValue("true") @QueryParam("followNPIDs") boolean followNPIDs,
@DefaultValue("true") @QueryParam("forward") boolean forward,
@DefaultValue("true") @QueryParam("backward") boolean backward,
@DefaultValue("true") @QueryParam("breadthFirst") boolean breadthFirst) {
TraversalSettings ts = new TraversalSettings();
ts.n = maxNodes;
ts.maxDepth = maxHops;
ts.backward = backward;
ts.forward = forward;
ts.includeNodes = includeNodes;
ts.includeEdges = includeEdges;
ts.includeNPEs = includeNPEs;
ts.followNPIDs = followNPIDs;
ts.breadthFirst = breadthFirst;
log.info("GET D3 GRAPH " + oid + " / " + ts);
if(maxNodes <= 0) return ServiceUtility.BAD_REQUEST("n must be greater than zero");
if(maxHops <= 0) return ServiceUtility.BAD_REQUEST("Max hops must be greater than zero");
try {
if((Neo4JStorage.oidExists(oid) == null) && (Neo4JStorage.getNPID(oid, false) == null))
return Response.status(Response.Status.NOT_FOUND).entity("Entity not found for " + oid).build();
LineageDAG col = Neo4JPLUSObjectFactory.newDAG(oid, ServiceUtility.getUser(req), ts);
log.info("D3 Graph for " + oid + " returned " + col);
return ServiceUtility.OK(col, req);
} catch(PLUSException exc) {
log.severe(exc.getMessage());
exc.printStackTrace();
return ServiceUtility.ERROR(exc.getMessage());
} // End catch
} // End getD3Graph
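	// Illustrative request sketch (not part of the original source): how a client might call the
	// endpoint above. The context path ("/plus"), host, port and OID are made-up assumptions; the
	// query parameters mirror the @QueryParam defaults declared on getGraph().
	//
	//   URL url = new URL("http://localhost:8080/plus/graph/" + oid + "?n=25&maxHops=4&forward=false");
	//   HttpURLConnection conn = (HttpURLConnection) url.openConnection();
	//   conn.setRequestProperty("Accept", MediaType.APPLICATION_JSON); // or APPLICATION_XML for PROV-XML
	//   // 200 -> serialized graph, 404 -> unknown OID/NPID, 400 -> non-positive n or maxHops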
/**
* This function is needed to check the format of incoming collections to see if they are
* loggable.
* @param col
* @return the same collection, or throws an exception on error.
*/
public ProvenanceCollection checkGraphFormat(ProvenanceCollection col) throws CollectionFormatException {
for(PLUSObject o : col.getNodes()) {
if(Neo4JStorage.oidExists(o.getId()) != null)
throw new CollectionFormatException("Node named " + o.getName() + " / " + o.getId() + " has a duplicate ID");
}
return col;
} // End checkGraphFormat
@SuppressWarnings("unchecked")
@POST
@Produces(MediaType.APPLICATION_JSON)
@Path("/new")
/**
* Creates a new graph in the provenance store. The parameters posted must include an item called "provenance" whose value
* is a D3 JSON object corresponding to the provenance graph that will be created.
* <p>This service will re-allocate new IDs for everything in the graph, and will *not* store the objects under the IDs provided by
* the user, to avoid conflicts on uniqueness.
* @param req
* @param queryParams a set of parameters, which must contain an element "provenance" mapping to a D3 JSON object.
* @return a D3 JSON graph of the provenance that was stored, with new IDs.
* @throws JsonFormatException
*/
public Response newGraph(@Context HttpServletRequest req, @FormParam("provenance") String provenance, MultivaluedMap<String, String> queryParams) throws JsonFormatException {
//String jsonStr = queryParams.getFirst("provenance");
User reportingUser = ServiceUtility.getUser(req);
log.info("NEW GRAPH msg len " + (provenance == null ? "null" : provenance.length()) + " REPORTING USER " + reportingUser);
if(provenance == null) {
Map<String,String[]> params = req.getParameterMap();
System.err.println("DEBUG: bad parameters to newGraph");
for(String k : params.keySet()) {
String[] val = params.get(k);
System.err.println(k + " => " + (val != null && val.length > 0 ? val[0] : "null"));
}
return ServiceUtility.BAD_REQUEST("You must specify a provenance parameter that is not empty.");
}
Gson g = new GsonBuilder().registerTypeAdapter(ProvenanceCollection.class, new ProvenanceCollectionDeserializer()).create();
ProvenanceCollection col = null;
try {
col = g.fromJson(provenance, ProvenanceCollection.class);
System.out.println("Converted from D3 JSON: " + col);
// Check format, and throw an exception if it's no good.
col = checkGraphFormat(col);
System.out.println("Tagging source...");
col = tagSource(col, req);
/* for many reasons, this is a bad idea. leave stubbed out for now.
System.out.println("Resetting IDs...");
col = resetIDs(col);
*/
Neo4JStorage.store(col);
} catch(CollectionFormatException gfe) {
log.warning("Failed storing collection: " + gfe.getMessage());
return ServiceUtility.BAD_REQUEST("Your collection contained a format problem: " + gfe.getMessage());
} catch(JsonParseException j) {
j.printStackTrace();
return ServiceUtility.BAD_REQUEST(j.getMessage());
} catch(PLUSException exc) {
return ServiceUtility.ERROR(exc.getMessage());
}
return ServiceUtility.OK(col);
} // End newGraph
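	// Illustrative request sketch (not part of the original source): the endpoint above expects a
	// form field named "provenance" containing a D3-style JSON graph. The exact node/link fields
	// are dictated by ProvenanceCollectionDeserializer; the payload below is only a made-up shape.
	//
	//   POST /plus/graph/new
	//   Content-Type: application/x-www-form-urlencoded
	//
	//   provenance={"nodes":[ ... ],"links":[ ... ]}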
@POST
@Produces(MediaType.APPLICATION_JSON)
@Path("/search")
/**
* Search the provenance store for objects with a particular cypher query.
* @param cypherQuery the cypher query
* @return a D3 JSON formatted provenance collection
* @deprecated
*/
public Response search(@FormParam("query") String cypherQuery) {
int limit = 100;
// log.info("SEARCH " + cypherQuery);
if(cypherQuery == null || "".equals(cypherQuery)) {
return ServiceUtility.BAD_REQUEST("No query");
}
// Ban certain "stop words" from the query to prevent users from updating, deleting, or
// creating data.
String [] stopWords = new String [] { "create", "delete", "set", "remove", "foreach", "merge" };
String q = cypherQuery.toLowerCase();
for(String sw : stopWords) {
if(q.contains(sw))
return ServiceUtility.BAD_REQUEST("Invalid query specified (" + sw + ")");
} // End for
/* Begin executing query */
ProvenanceCollection col = new ProvenanceCollection();
try (Transaction tx = Neo4JStorage.beginTx()) {
log.info("Query for " + cypherQuery);
ExecutionResult rs = Neo4JStorage.execute(cypherQuery);
for(String colName : rs.columns()) {
int x=0;
ResourceIterator<?> it = rs.columnAs(colName);
while(it.hasNext() && x < limit) {
Object next = it.next();
if(next instanceof Node) {
if(Neo4JStorage.isPLUSObjectNode((Node)next))
col.addNode(Neo4JPLUSObjectFactory.newObject((Node)next));
else {
log.info("Skipping non-provenance object node ID " + ((Node)next).getId());
continue;
}
} else if(next instanceof Relationship) {
Relationship rel = (Relationship)next;
if(Neo4JStorage.isPLUSObjectNode(rel.getStartNode()) &&
Neo4JStorage.isPLUSObjectNode(rel.getEndNode())) {
col.addNode(Neo4JPLUSObjectFactory.newObject(rel.getStartNode()));
col.addNode(Neo4JPLUSObjectFactory.newObject(rel.getEndNode()));
col.addEdge(Neo4JPLUSObjectFactory.newEdge(rel));
} else {
log.info("Skipping non-provenance edge (not yet supported) " + rel.getId());
}
}
} // End while
it.close();
if((col.countEdges() + col.countNodes()) >= limit) break;
}
tx.success();
} catch(Exception exc) {
exc.printStackTrace();
return ServiceUtility.ERROR(exc.getMessage());
}
return ServiceUtility.OK(col);
} // End search
protected Object formatLimitedSearchResult(Object o) {
if(o instanceof Node) {
Node n = (Node)o;
if(Neo4JStorage.isPLUSObjectNode(n)) {
HashMap<String,Object> nodeProps = new HashMap<String,Object>();
nodeProps.put("oid", n.getProperty("oid"));
nodeProps.put("name", n.getProperty("name", "Unknown"));
return nodeProps;
} else {
log.info("Skipping non-provenance object node ID " + n.getId());
return null;
}
} else if(o instanceof Relationship) {
Relationship r = (Relationship)o;
if(Neo4JStorage.isPLUSObjectNode(r.getStartNode()) &&
Neo4JStorage.isPLUSObjectNode(r.getEndNode())) {
//TODO ;
}
HashMap<String,Object> relProps = new HashMap<String,Object>();
relProps.put("from", formatLimitedSearchResult(r.getStartNode()));
relProps.put("to", formatLimitedSearchResult(r.getEndNode()));
relProps.put("type", r.getType().name());
return relProps;
} else if(o instanceof Iterable) {
ArrayList<Object> things = new ArrayList<Object>();
for(Object so : (Iterable<?>)o) {
Object ro = formatLimitedSearchResult(so);
if(ro != null) things.add(ro);
}
return things;
} else {
log.info("Unsupported query response type " + o.getClass().getCanonicalName());
}
return null;
} // End formatLimitedSearchResult
/**
* This method resets all of the unique identifiers. We can't trust identifiers coming from
* external clients to be truly globally unique, so we re-generate our own. This means we have to
* keep track of references to other nodes within the edge table.
*
* This method also REMOVES dangling edges from the store.
* @param col
* @return
*
protected ProvenanceCollection resetIDs(ProvenanceCollection col) throws JsonFormatException {
HashMap<String,String> idMapping = new HashMap<String,String>();
// Generate new IDs for all objects the user is reporting; store the mapping.
for(PLUSObject o : col.getNodes().values()) {
String id = o.getId();
String newID = PLUSUtils.generateID();
if(idMapping.containsKey(id))
throw new JsonFormatException("The converted provenance collection contains a duplicate node entry under " + id);
o.setId(newID);
idMapping.put(id, newID);
}
// Re-write each of the user-reported edges, to refer to the nodes we just re-ID'd.
for(PLUSEdge e : col.getEdges().values()) {
String oldFrom = e.getFrom();
String oldTo = e.getTo();
String newFrom = idMapping.get(oldFrom);
String newTo = idMapping.get(oldTo);
if(newFrom != null)
e.setFrom(newFrom);
else {
//In some cases, the user will refer to a node that isn't in the set of what they're logging,
//but which *does* exist. That's OK -- just keep the original ID they reported. If the ID they're
//reporting references nothing, skip the edge.
//
if(Neo4JStorage.oidExists(oldFrom) == null) {
log.warning("Can't log dangling edge " + e + " with non-existent node " + oldFrom);
continue;
} else newFrom = oldFrom;
}
if(newTo != null)
e.setTo(newTo);
else {
if(Neo4JStorage.oidExists(oldTo) == null) {
log.warning("Can't log dangling edge " + e + " with non-existent node " + oldTo);
continue;
} else newTo = oldTo;
}
} // End for
for(NonProvenanceEdge npe : col.getNonProvenanceEdges().values()) {
String old = npe.getIncidentOID();
String newOID = idMapping.get(old);
if(newOID == null) {
if(Neo4JStorage.oidExists(old) != null) newOID = old;
else log.warning("Dangling NPE on " + npe);
} else {
if(PLUSUtils.isPLUSOID(npe.getFrom())) npe.setFrom(newOID);
else npe.setTo(newOID);
}
} // End for
return col;
} // End resetIDs
*/
/**
* Tag each of the objects in a provenance collection with information about the user that
* posted them.
* @param col the provenance collection to tag
 * @param req the request that created the provenance collection
* @return the modified collection
*/
protected ProvenanceCollection tagSource(ProvenanceCollection col, HttpServletRequest req) {
String addr = req.getRemoteAddr();
String host = req.getRemoteHost();
String user = req.getRemoteUser();
String ua = req.getHeader("User-Agent");
String tag = (user != null ? user : "unknown") + "@" +
host + " " +
(host.equals(addr) ? "" : "(" + addr + ") ") +
ua;
long reportTime = System.currentTimeMillis();
for(PLUSObject o : col.getNodes()) {
o.getMetadata().put("plus:reporter", tag);
o.getMetadata().put("plus:reportTime", reportTime);
}
return col;
} // End tagSource
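	// Illustrative sketch (not part of the original source): a "plus:reporter" value produced by the
	// method above could look like "alice@workstation.example.org (10.1.2.3) Mozilla/5.0 ...", i.e.
	// user@host, the remote address when it differs from the host, then the User-Agent string.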
} // End DAGServices
| src/main/java/org/mitre/provenance/services/DAGServices.java | /* Copyright 2014 MITRE Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mitre.provenance.services;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Logger;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import org.mitre.provenance.PLUSException;
import org.mitre.provenance.dag.LineageDAG;
import org.mitre.provenance.dag.TraversalSettings;
import org.mitre.provenance.db.neo4j.Neo4JPLUSObjectFactory;
import org.mitre.provenance.db.neo4j.Neo4JStorage;
import org.mitre.provenance.plusobject.PLUSObject;
import org.mitre.provenance.plusobject.ProvenanceCollection;
import org.mitre.provenance.plusobject.json.JsonFormatException;
import org.mitre.provenance.plusobject.json.ProvenanceCollectionDeserializer;
import org.mitre.provenance.user.User;
import org.neo4j.cypher.javacompat.ExecutionResult;
import org.neo4j.graphdb.Node;
import org.neo4j.graphdb.Relationship;
import org.neo4j.graphdb.ResourceIterator;
import org.neo4j.graphdb.Transaction;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonParseException;
/**
 * DAGServices encompasses RESTful services that operate over provenance "DAGs" (directed acyclic graphs).
* @author dmallen
*/
@Path("/graph")
public class DAGServices {
protected static Logger log = Logger.getLogger(DAGServices.class.getName());
public class CollectionFormatException extends Exception {
private static final long serialVersionUID = 2819285921155590440L;
public CollectionFormatException(String msg) { super(msg); }
}
@GET
@Path("/{oid:.*}")
@Produces(MediaType.APPLICATION_JSON)
/**
* Gets a provenance graph centered at a particular point, in D3 JSON format.
* @param oid the OID for the starting point of the graph
* @return a D3 JSON string, or 404 if not found, or internal server error on PLUSException
*/
public Response getD3Graph(@Context HttpServletRequest req,
@PathParam("oid") String oid,
@DefaultValue("50") @QueryParam("n") int maxNodes,
@DefaultValue("8") @QueryParam("maxHops") int maxHops,
@DefaultValue("true") @QueryParam("includeNodes") boolean includeNodes,
@DefaultValue("true") @QueryParam("includeEdges") boolean includeEdges,
@DefaultValue("true") @QueryParam("includeNPEs") boolean includeNPEs,
@DefaultValue("true") @QueryParam("followNPIDs") boolean followNPIDs,
@DefaultValue("true") @QueryParam("forward") boolean forward,
@DefaultValue("true") @QueryParam("backward") boolean backward,
@DefaultValue("true") @QueryParam("breadthFirst") boolean breadthFirst) {
TraversalSettings ts = new TraversalSettings();
ts.n = maxNodes;
ts.maxDepth = maxHops;
ts.backward = backward;
ts.forward = forward;
ts.includeNodes = includeNodes;
ts.includeEdges = includeEdges;
ts.includeNPEs = includeNPEs;
ts.followNPIDs = followNPIDs;
ts.breadthFirst = breadthFirst;
log.info("GET D3 GRAPH " + oid + " / " + ts);
if(maxNodes <= 0) return ServiceUtility.BAD_REQUEST("n must be greater than zero");
if(maxHops <= 0) return ServiceUtility.BAD_REQUEST("Max hops must be greater than zero");
try {
if((Neo4JStorage.oidExists(oid) == null) && (Neo4JStorage.getNPID(oid, false) == null))
return Response.status(Response.Status.NOT_FOUND).entity("Entity not found for " + oid).build();
LineageDAG col = Neo4JPLUSObjectFactory.newDAG(oid, ServiceUtility.getUser(req), ts);
log.info("D3 Graph for " + oid + " returned " + col);
return ServiceUtility.OK(col);
} catch(PLUSException exc) {
log.severe(exc.getMessage());
exc.printStackTrace();
return ServiceUtility.ERROR(exc.getMessage());
} // End catch
} // End getD3Graph
/**
* This function is needed to check the format of incoming collections to see if they are
* loggable.
* @param col
* @return the same collection, or throws an exception on error.
*/
public ProvenanceCollection checkGraphFormat(ProvenanceCollection col) throws CollectionFormatException {
for(PLUSObject o : col.getNodes()) {
if(Neo4JStorage.oidExists(o.getId()) != null)
throw new CollectionFormatException("Node named " + o.getName() + " / " + o.getId() + " has a duplicate ID");
}
return col;
} // End checkGraphFormat
@SuppressWarnings("unchecked")
@POST
@Produces(MediaType.APPLICATION_JSON)
@Path("/new")
/**
* Creates a new graph in the provenance store. The parameters posted must include an item called "provenance" whose value
* is a D3 JSON object corresponding to the provenance graph that will be created.
* <p>This service will re-allocate new IDs for everything in the graph, and will *not* store the objects under the IDs provided by
* the user, to avoid conflicts on uniqueness.
* @param req
* @param queryParams a set of parameters, which must contain an element "provenance" mapping to a D3 JSON object.
* @return a D3 JSON graph of the provenance that was stored, with new IDs.
* @throws JsonFormatException
*/
public Response newGraph(@Context HttpServletRequest req, @FormParam("provenance") String provenance, MultivaluedMap<String, String> queryParams) throws JsonFormatException {
//String jsonStr = queryParams.getFirst("provenance");
User reportingUser = ServiceUtility.getUser(req);
log.info("NEW GRAPH msg len " + (provenance == null ? "null" : provenance.length()) + " REPORTING USER " + reportingUser);
if(provenance == null) {
Map<String,String[]> params = req.getParameterMap();
System.err.println("DEBUG: bad parameters to newGraph");
for(String k : params.keySet()) {
String[] val = params.get(k);
System.err.println(k + " => " + (val != null && val.length > 0 ? val[0] : "null"));
}
return ServiceUtility.BAD_REQUEST("You must specify a provenance parameter that is not empty.");
}
Gson g = new GsonBuilder().registerTypeAdapter(ProvenanceCollection.class, new ProvenanceCollectionDeserializer()).create();
ProvenanceCollection col = null;
try {
col = g.fromJson(provenance, ProvenanceCollection.class);
System.out.println("Converted from D3 JSON: " + col);
// Check format, and throw an exception if it's no good.
col = checkGraphFormat(col);
System.out.println("Tagging source...");
col = tagSource(col, req);
/* for many reasons, this is a bad idea. leave stubbed out for now.
System.out.println("Resetting IDs...");
col = resetIDs(col);
*/
Neo4JStorage.store(col);
} catch(CollectionFormatException gfe) {
log.warning("Failed storing collection: " + gfe.getMessage());
return ServiceUtility.BAD_REQUEST("Your collection contained a format problem: " + gfe.getMessage());
} catch(JsonParseException j) {
j.printStackTrace();
return ServiceUtility.BAD_REQUEST(j.getMessage());
} catch(PLUSException exc) {
return ServiceUtility.ERROR(exc.getMessage());
}
return ServiceUtility.OK(col);
} // End newGraph
@POST
@Produces(MediaType.APPLICATION_JSON)
@Path("/search")
/**
* Search the provenance store for objects with a particular cypher query.
* @param cypherQuery the cypher query
* @return a D3 JSON formatted provenance collection
* @deprecated
*/
public Response search(@FormParam("query") String cypherQuery) {
int limit = 100;
// log.info("SEARCH " + cypherQuery);
if(cypherQuery == null || "".equals(cypherQuery)) {
return ServiceUtility.BAD_REQUEST("No query");
}
// Ban certain "stop words" from the query to prevent users from updating, deleting, or
// creating data.
String [] stopWords = new String [] { "create", "delete", "set", "remove", "foreach", "merge" };
String q = cypherQuery.toLowerCase();
for(String sw : stopWords) {
if(q.contains(sw))
return ServiceUtility.BAD_REQUEST("Invalid query specified (" + sw + ")");
} // End for
/* Begin executing query */
ProvenanceCollection col = new ProvenanceCollection();
try (Transaction tx = Neo4JStorage.beginTx()) {
log.info("Query for " + cypherQuery);
ExecutionResult rs = Neo4JStorage.execute(cypherQuery);
for(String colName : rs.columns()) {
int x=0;
ResourceIterator<?> it = rs.columnAs(colName);
while(it.hasNext() && x < limit) {
Object next = it.next();
if(next instanceof Node) {
if(Neo4JStorage.isPLUSObjectNode((Node)next))
col.addNode(Neo4JPLUSObjectFactory.newObject((Node)next));
else {
log.info("Skipping non-provenance object node ID " + ((Node)next).getId());
continue;
}
} else if(next instanceof Relationship) {
Relationship rel = (Relationship)next;
if(Neo4JStorage.isPLUSObjectNode(rel.getStartNode()) &&
Neo4JStorage.isPLUSObjectNode(rel.getEndNode())) {
col.addNode(Neo4JPLUSObjectFactory.newObject(rel.getStartNode()));
col.addNode(Neo4JPLUSObjectFactory.newObject(rel.getEndNode()));
col.addEdge(Neo4JPLUSObjectFactory.newEdge(rel));
} else {
log.info("Skipping non-provenance edge (not yet supported) " + rel.getId());
}
}
} // End while
it.close();
if((col.countEdges() + col.countNodes()) >= limit) break;
}
tx.success();
} catch(Exception exc) {
exc.printStackTrace();
return ServiceUtility.ERROR(exc.getMessage());
}
return ServiceUtility.OK(col);
} // End search
protected Object formatLimitedSearchResult(Object o) {
if(o instanceof Node) {
Node n = (Node)o;
if(Neo4JStorage.isPLUSObjectNode(n)) {
HashMap<String,Object> nodeProps = new HashMap<String,Object>();
nodeProps.put("oid", n.getProperty("oid"));
nodeProps.put("name", n.getProperty("name", "Unknown"));
return nodeProps;
} else {
log.info("Skipping non-provenance object node ID " + n.getId());
return null;
}
} else if(o instanceof Relationship) {
Relationship r = (Relationship)o;
if(Neo4JStorage.isPLUSObjectNode(r.getStartNode()) &&
Neo4JStorage.isPLUSObjectNode(r.getEndNode())) {
//TODO ;
}
HashMap<String,Object> relProps = new HashMap<String,Object>();
relProps.put("from", formatLimitedSearchResult(r.getStartNode()));
relProps.put("to", formatLimitedSearchResult(r.getEndNode()));
relProps.put("type", r.getType().name());
return relProps;
} else if(o instanceof Iterable) {
ArrayList<Object> things = new ArrayList<Object>();
for(Object so : (Iterable<?>)o) {
Object ro = formatLimitedSearchResult(so);
if(ro != null) things.add(ro);
}
return things;
} else {
log.info("Unsupported query response type " + o.getClass().getCanonicalName());
}
return null;
} // End formatLimitedSearchResult
/**
* This method resets all of the unique identifiers. We can't trust identifiers coming from
* external clients to be truly globally unique, so we re-generate our own. This means we have to
* keep track of references to other nodes within the edge table.
*
* This method also REMOVES dangling edges from the store.
* @param col
* @return
*
protected ProvenanceCollection resetIDs(ProvenanceCollection col) throws JsonFormatException {
HashMap<String,String> idMapping = new HashMap<String,String>();
// Generate new IDs for all objects the user is reporting; store the mapping.
for(PLUSObject o : col.getNodes().values()) {
String id = o.getId();
String newID = PLUSUtils.generateID();
if(idMapping.containsKey(id))
throw new JsonFormatException("The converted provenance collection contains a duplicate node entry under " + id);
o.setId(newID);
idMapping.put(id, newID);
}
// Re-write each of the user-reported edges, to refer to the nodes we just re-ID'd.
for(PLUSEdge e : col.getEdges().values()) {
String oldFrom = e.getFrom();
String oldTo = e.getTo();
String newFrom = idMapping.get(oldFrom);
String newTo = idMapping.get(oldTo);
if(newFrom != null)
e.setFrom(newFrom);
else {
//In some cases, the user will refer to a node that isn't in the set of what they're logging,
//but which *does* exist. That's OK -- just keep the original ID they reported. If the ID they're
//reporting references nothing, skip the edge.
//
if(Neo4JStorage.oidExists(oldFrom) == null) {
log.warning("Can't log dangling edge " + e + " with non-existent node " + oldFrom);
continue;
} else newFrom = oldFrom;
}
if(newTo != null)
e.setTo(newTo);
else {
if(Neo4JStorage.oidExists(oldTo) == null) {
log.warning("Can't log dangling edge " + e + " with non-existent node " + oldTo);
continue;
} else newTo = oldTo;
}
} // End for
for(NonProvenanceEdge npe : col.getNonProvenanceEdges().values()) {
String old = npe.getIncidentOID();
String newOID = idMapping.get(old);
if(newOID == null) {
if(Neo4JStorage.oidExists(old) != null) newOID = old;
else log.warning("Dangling NPE on " + npe);
} else {
if(PLUSUtils.isPLUSOID(npe.getFrom())) npe.setFrom(newOID);
else npe.setTo(newOID);
}
} // End for
return col;
} // End resetIDs
*/
/**
* Tag each of the objects in a provenance collection with information about the user that
* posted them.
* @param col the provenance collection to tag
 * @param req the request that created the provenance collection
* @return the modified collection
*/
protected ProvenanceCollection tagSource(ProvenanceCollection col, HttpServletRequest req) {
String addr = req.getRemoteAddr();
String host = req.getRemoteHost();
String user = req.getRemoteUser();
String ua = req.getHeader("User-Agent");
String tag = (user != null ? user : "unknown") + "@" +
host + " " +
(host.equals(addr) ? "" : "(" + addr + ") ") +
ua;
long reportTime = System.currentTimeMillis();
for(PLUSObject o : col.getNodes()) {
o.getMetadata().put("plus:reporter", tag);
o.getMetadata().put("plus:reportTime", reportTime);
}
return col;
} // End tagSource
} // End DAGServices
| made getGraph() more generic, permitted PROV-XML serialization
| src/main/java/org/mitre/provenance/services/DAGServices.java | made getGraph() more generic, permitted PROV-XML serialization | <ide><path>rc/main/java/org/mitre/provenance/services/DAGServices.java
<ide>
<ide> @GET
<ide> @Path("/{oid:.*}")
<del> @Produces(MediaType.APPLICATION_JSON)
<add> @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
<ide> /**
<ide> * Gets a provenance graph centered at a particular point, in D3 JSON format.
<ide> * @param oid the OID for the starting point of the graph
<ide> * @return a D3 JSON string, or 404 if not found, or internal server error on PLUSException
<ide> */
<del> public Response getD3Graph(@Context HttpServletRequest req,
<add> public Response getGraph(@Context HttpServletRequest req,
<ide> @PathParam("oid") String oid,
<ide> @DefaultValue("50") @QueryParam("n") int maxNodes,
<ide> @DefaultValue("8") @QueryParam("maxHops") int maxHops,
<ide>
<ide> LineageDAG col = Neo4JPLUSObjectFactory.newDAG(oid, ServiceUtility.getUser(req), ts);
<ide> log.info("D3 Graph for " + oid + " returned " + col);
<del> return ServiceUtility.OK(col);
<add>
<add> return ServiceUtility.OK(col, req);
<ide> } catch(PLUSException exc) {
<ide> log.severe(exc.getMessage());
<ide> exc.printStackTrace(); |
|
JavaScript | mit | c44c4920df51e08bedd58547df7e9058b52f80bd | 0 | PittsburghPG/pa-campaign-finance,PittsburghPG/pa-campaign-finance | $(document).ready(function() {
/* var mapOptions = {
zoom: 8,
center: new google.maps.LatLng(40.4450813, -80.00877459999998),
disableDefaultUI: true
};
map = new google.maps.Map(document.getElementById('map_canvas'),
mapOptions);*/
//parse url
var pathname = window.location.pathname; //get current url
var split = pathname.split("/"); //treat it like a string, break it up where the /'s are
//console.log(split[2]);
//case statement
switch(split[2]) { //the second item in the array will be the type of page this will be
case "candidates":
$('#bycandidate').addClass('active'); //make the dropdown menu active on the correct item
var candName = decodeURIComponent(split[3]); //grab the candidate name from the url
console.log(toTitleCase(candName));
break;
case "counties":
$('#bycounty').addClass('active');
var countyName = split[3];
console.log("one more tiiiiiiiiiime");
console.log("we're gonna celebrate");
break;
case "contributors":
$('#bycontributor').addClass('active');
var contribID = split[3];
$.getJSON("/api/contributors/" + contribID, function(data){
//var need = "{need variable}";
//intro row
var container = $("#main");
introRow = $("<div></div>").appendTo(container);
introRow.addClass("row intro-row");
var introLabel = $("<label>CONTRIBUTOR</label>");
introLabel.appendTo(introRow);
var jumbotron = $("<div></div>").appendTo(introRow);
jumbotron.addClass("jumbotron");
var contribName = data.results[0].contributor;
var headerAmt = $("<h1>" + contribName + "</h1><div class='thin-divider'></div>");
headerAmt.appendTo(jumbotron);
var contribJob = data.results[0].occupation;
var contribEmp = data.results[0].empName;
//console.log(contribJob.length);
var contribTitle = "";
if((contribJob.length > 0) && (contribEmp.length > 0)){
contribTitle = "<small>" + contribJob + ", " + contribEmp + "</small>";
}else if ((contribJob.length > 0) && (contribEmp.length == 0)){
contribTitle = "<small>" + contribJob + "</small>";
}else if ((contribJob.length == 0) && (contribEmp.length > 0)){
contribTitle = "<small>" + contribEmp + "</small>";
}else {
contribTitle = "";
};
$(contribTitle).appendTo(headerAmt);
var thinDivider = $("<div class='thin-divider'></div>");
thinDivider.appendTo(jumbotron);
// top totals row
var topTotals = $("<div class='row top-totals'></div>").appendTo(container);
var topTotalsLeft = $("<div class='col-lg-4 col-md-4 col-sm-4 col-xs-12 block first'></div>").appendTo(topTotals);
// Location block
var locationCity = data.results[0].city;
var locationCounty = data.results[0].county;
var topTotalsLeftLocationBlock = $("<div class='row'> \
<div class='col-lg-12 col-md-12 col-sm-12'> \
<label>LOCATION</label><h3>" + locationCity + "</h3> \
<p><a href='/a/counties/" + locationCounty + "'>" + locationCounty + " County</a></p> \
</div> \
</div> \
<div class='thin-divider'></div>"
).appendTo(topTotalsLeft);
thinDivider.appendTo(topTotalsLeft);
//Top contributed block
var topTotalsContrib = $("<div class='row'></div>").appendTo(topTotalsLeft);
var topTotalsContribCol12 = $("<div class='col-lg-12 col-md-12 col-sm-12'></div>").appendTo(topTotalsContrib);
var contributionTotal = data.results[0].amount;
var topTotalsContribLabel = $("<label>TOTAL CONTRIBUTED</label><h3>" + toDollars(contributionTotal) + "</h3>").appendTo(topTotalsContribCol12);
//Candidate breakdown table
var topTotalsContribCandidate = $("<div class='row'></div>").appendTo(topTotalsContribCol12);
var topTotalsContribCandidateCol12 = $("<div class='col-lg-12 col-md-12 col-sm-12'></div>").appendTo(topTotalsContribCandidate);
var topTotalsContribCandidateTable = $("<table class='horizontal-bar-graph'></table>").appendTo(topTotalsContribCandidateCol12);
//Corbett_Wolf variables and logic
//Default both candidates to zero and only overwrite on a filer-id match, so a
//non-matching beneficiary later in the list cannot reset an amount already found.
var wolfContributionAmt = "0";
var wolfContributionNum = "0";
var results = data.results[0].beneficiaries;
$.each(results, function(i, item){
    if(results[i].filerid == "20130153"){
        wolfContributionAmt = results[i].amount;
        wolfContributionNum = results[i].contributions;
    }
});
var corbettContributionAmt = "0";
var corbettContributionNum = "0";
$.each(results, function(i, item){
    if(results[i].filerid == "2009216"){
        corbettContributionAmt = results[i].amount;
        corbettContributionNum = results[i].contributions;
    }
});
var wolfBarWidth = "";
var corbettBarWidth = "";
//Bar widths are used as CSS percentages below, so scale the smaller amount's ratio by 100.
if(wolfContributionAmt > corbettContributionAmt){
    wolfBarWidth = "100";
    corbettBarWidth = (corbettContributionAmt / wolfContributionAmt) * 100;
}else if(corbettContributionAmt > wolfContributionAmt){
    corbettBarWidth = "100";
    wolfBarWidth = (wolfContributionAmt / corbettContributionAmt) * 100;
} else{
    corbettBarWidth = "100";
    wolfBarWidth = "100";
};
//Corbett row --> need to make graphic length respect amt donated
var topTotalsCorbettRow = $("<tr><td><strong>Corbett</strong></td><td><div class='bar republican' style='width:" + corbettBarWidth +"%; color:#000000;'></div><span style='overflow:visible;'>" + corbettContributionAmt + " (" + corbettContributionNum + " contributions)" + "</span></td></tr>").appendTo(topTotalsContribCandidateTable);
//Wolf row --> need to make graphic length respect amt donated
var topTotalsWolfRow = $("<tr><td><strong>Wolf</strong></td><td><div class='bar democrat' style='width:" + wolfBarWidth + "%; color:#000000;'></div><span style='overflow:visible;'>" + wolfContributionAmt + " (" + wolfContributionNum + " contributions)" + "</span></td></tr>").appendTo(topTotalsContribCandidateTable);
//Overtime
var topTotalsOvertime = $("<div class='col-lg-8 col-md-8 col-sm-8 col-xs-12 block last'>").appendTo(topTotals);
var topTotalsOvertimeGraph = $("<h3>Contributions over time</h3><div id='timechart' style='width:100%; height:400px'></div>").appendTo(topTotalsOvertime);
makeTimeChart("timechart", "contributors", contribID, "2013-01-01", "2014-11-01");
thinDivider.appendTo(container);
container.append(thinDivider);
container.append('<div class="row tabular"> \
<div class="col-lg-12 col-md-12 col-sm-12"> \
<h3>All donations by ' + contribName +'</h3> \
<form class="form-inline pull-right"> \
<input type="search" class="form-control" placeholder="Search"> \
<button type="submit" class="btn btn-default">Submit</button> \
</form> \
<table class="table table-hover sortable"> \
<thead> \
<tr> \
<th>Date</th>\
<th>Candidate/PAC</th> \
<th>Amount</th> \
</tr> \
</thead> \
<tbody></tbody> \
</table> \
</div> \
</div> ');
//get contributor data
$.getJSON("/api/contributions/contributors/" + contribID, function(data){
$.each(data.results, function(i, result){
$('.tabular tbody').append("<tr><td><a href='/a/contributions/" + result.id + "'>" + result.date + "</a></td><td>" + result.name + "</td><td>" + toDollars(result.contribution) + "</td></tr>");
});
});
});
break;
case "contributions":
$('#bycontributor').addClass('active');
var contributionID = split[3];
$.getJSON("/api/contributions/" + contributionID, function(data){
var contribName = data.results[0].contributor;
var contribID = data.results[0].contributorid;
var contributionAmt = "$" + data.results[0].contribution;
var conDateRaw = data.results[0].date;
var d = new Date(conDateRaw);
var con_date = d.getDate();
//getMonth() is zero-based, so add 1 to get the calendar month for display.
var con_month = d.getMonth() + 1;
var con_year = d.getFullYear();
var contributionDate = con_month + "/" + con_date + "/" + con_year;
var filerID = data.results[0].filerid;
$.getJSON("/api/contributors/" + contribID, function(data){
var locationCity = data.results[0].city;
var locationZip = data.results[0].zip;
locationZip = locationZip.substring(0,5);
$.getJSON("/api/candidates/", function(data){
var candidateName = "";
var results = data.results;
$.each(results, function(i, item){
if(results[i].filerid == filerID){
candidateName = results[i].name;
}
});
//intro row
var container = $("#main");
introRow = $("<div></div>").appendTo(container);
introRow.addClass("row intro-row");
var introLabel = $("<label>CONTRIBUTION</label>");
introLabel.appendTo(introRow);
var jumbotron = $("<div></div>").appendTo(introRow);
jumbotron.addClass("jumbotron");
var headerAmt = $("<h1>" + contributionAmt + "</h1>");
headerAmt.appendTo(jumbotron);
var thinDivider = $("<div class='thin-divider'></div>");
thinDivider.appendTo(jumbotron);
//donation info row
contributionRow = $("<div></div>").appendTo(container);
contributionRow.addClass("row");
var colmd5 = "col-md-5";
var collg7 = "col-lg-7";
var colmd7 = "col-md-7";
var colsm7 = "col-sm-7";
contributionColumn = $("<div></div>").appendTo(contributionRow);
contributionColumn.addClass(colmd5);
var h3 = "<h3></h3>";
var contribItem = $(h3).appendTo(contributionColumn);
contribItem.addClass("donor-item");
var contribLabel = $("<strong>Contributor: </strong>").appendTo(contribItem);
var contribLink = $("<a>" + contribName + "</a>").appendTo(contribItem);
contribLink.attr("href", "/a/contributors/" + contribID);
var candidateItem = $(h3).appendTo(contributionColumn);
candidateItem.addClass("candidate-item");
var candidateLabel = $("<strong>Candidate: </strong>").appendTo(candidateItem);
var candidateLink = $("<a>" + candidateName + "</a>").appendTo(candidateItem);
candidateLink.attr("href", "/a/candidates/" + candidateName);
var dateItem = $(h3).appendTo(contributionColumn);
dateItem.addClass("date-item");
var dateLabel = $("<strong>Date: </strong>").appendTo(dateItem);
var dateData = $("<span>" + contributionDate + "</span>").appendTo(dateItem);
var locationItem = $(h3).appendTo(contributionColumn);
locationItem.addClass("location-item");
var locationLabel = $("<strong>Location: </strong>").appendTo(locationItem);
var locationData = $("<span>" + locationCity + ", " + locationZip + "</span>").appendTo(locationItem);
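// Set up a Google Map inside #map-canvas; note the center below is a hard-coded example
// coordinate rather than a geocoded position for the contribution's city/zip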
function initialize() {
var mapOptions = {
center: { lat: -34.397, lng: 150.644},
zoom: 8
};
var map = new google.maps.Map(document.getElementById('map-canvas'), mapOptions);
};
google.maps.event.addDomListener(window, 'load', initialize);
mapColumn = $("<div class='col-lg-7 col-md-7 col-sm-7' id='map-canvas' style='height:100%;'></div>").appendTo(contributionRow);
// The Google Map renders itself into #map-canvas once the 'load' handler above runs, so nothing else needs appending here
//var countyMap = $("<img class='sm_county_map' src='/img/allegheny-map.png' alt='Allegheny County locator map'>").appendTo(mapColumn);
});
});
});
break;
default:
//default //code block
}
});
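// Converts a string to Title Case, e.g. "tom wolf" -> "Tom Wolf"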
function toTitleCase(str)
{
return str.replace(/\w\S*/g, function(txt){return txt.charAt(0).toUpperCase() + txt.substr(1).toLowerCase();});
}
// Converts XXXXXX.XXXXX to $XXX,XXX.XX
function toDollars(x){
return "$" + numberWithCommas( Math.floor(x * 100) / 100 );
}
// Converts XXXXXXXX to XX,XXX,XXX
function numberWithCommas(x){
return x.toString().replace(/\B(?=(\d{3})+(?!\d))/g, ",");
}
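// Fetches monthly totals from api/months/<endpoint>/<target> for the given date range and
// plots them with flot as a time-series line chart inside the element with the given id,
// e.g. (as used above) makeTimeChart("timechart", "contributors", contribID, "2013-01-01", "2014-11-01")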
function makeTimeChart(id, endpoint, target, startDate, endDate){
$.getJSON("api/months/" + endpoint + "/" + target + "?startDate=" + startDate + "&endDate=" + endDate, function(json){
data = [];
$.each(json.results, function(i, date){
data.push( [Date.parse(date["year"] + "-" + date["month"] + "-" + "01 05:01:00"), date["total"]] );
$.plot("#" + id, [{
data: data,
color:"seagreen"
}], {
series: {
lines:{
lineWidth: 6,
show:true
},
points:{
show:true
}
},
xaxis: {
mode: "time",
min: data[0][0]
},
yaxis: {
tickFormatter: function(val, axis){
return "$" + numberWithCommas(val);
},
min: 0
},
grid: {
borderWidth: {
top: 0,
left: 1,
right: 0,
bottom: 1
},
hoverable: true
},
markings:0
});
$("#" + id).bind("plothover", function(event, pos, item){
if( item ) {
if( $("#tooltip").length == 0 ){
$("<div id='tooltip'></div>").appendTo( $("body") )
.css({top: item.clientY+5, left: item.clientX+5});
$("#tooltip").html("<div class='date'>" + ( new Date(item.datapoint[0]).getMonth() + 1 ) + "/" + new Date(item.datapoint[0]).getFullYear() + "</div><div class='text'>" + toDollars(item.datapoint[1]) + "</div>");
console.log(item.datapoint[0]);
}
else {
$("#tooltip").css({top: item.clientY-20, left: item.clientX+10});
}
x = item.datapoint[0].toFixed(2);
y = item.datapoint[1].toFixed(2);
}
else $("#tooltip").remove();
});
});
});
}
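// Draws a flot pie chart splitting a candidate's contribution totals into "In-state" vs "Out of state",
// with a tooltip showing the hovered slice's percentage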
function makePieChart(id, target, target_state){
$.getJSON("api/states/candidates/" + target, function(json) {
data = [];
$.each(json.results, function(i, state){
if( state.state == target_state ) data[1] = { label: "In-state", data: state.amount};
else if ( i == 0 ) data.push({ label: "Out of state", data: parseFloat(state.amount)})
else data[0].data += parseFloat(state.amount);
});
$.plot('#' + id, data, {
series: {
pie: {
show: true,
hoverable: true
}
},
grid: {
hoverable: true
}
});
$("#" + id).bind("plothover", function(e, pos, item){
if( item ) {
if( $("#tooltip").length == 0 ){
$("<div id='tooltip'></div>").appendTo( $("body") );
}
$("#tooltip").css({top: pos.pageY-20, left: pos.pageX+10});
$("#tooltip").html("<div class='text'>" + Math.floor(item.datapoint[0]) + "%</div>");
}
else $("#tooltip").remove();
});
});
}
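// Draws the Pennsylvania county map with d3 from js/min.pennsylvania.json, tags each county with the
// party of its top beneficiary (used as a CSS class for the fill), and adds hover tooltips listing
// per-candidate totals plus click-through to the county page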
function drawCandidateMap( id ){
map = d3.select("#" + id);
w = $(map.node()).width();
h = $(map.node()).height();
var counties = [];
$.getJSON("api/counties", function(countiesJSON){
$.each(countiesJSON.results, function(i, county){
county.beneficiaries.sort(function(a,b){
return +b.amount - +a.amount;
});
counties[county.county] = { county: county.county, winner: county.beneficiaries[0].party, candidates: county.beneficiaries };
});
d3.json("js/min.pennsylvania.json", function(error, json) {
// Join shapefile data
county = d3.select("#map")
.selectAll("path")
.data(json.features)
.enter().append("path")
.attr("class", function(d){
var output = "county";
if(counties[d.properties.NAME]){
output += " " + counties[d.properties.NAME].winner;
}
return output;
})
.on("mousemove", function(d){
d3.select("#tooltip").remove();
d3.select("body").append("div")
.attr("id", "tooltip")
.attr("class", "table")
.html(function(){
output = "<h4>" + d.properties.NAME + "</h4><table><tbody>";
if( counties[d.properties.NAME] ){
$.each( counties[d.properties.NAME].candidates, function(i, candidate){
output += "<tr><td>" + candidate.name + "</td><td><strong>" + toDollars(candidate.amount) + "</td></tr>";
});
}
return output + "</tbody></table>";
})
.style("left", d3.event.clientX + 10 + "px")
.style("top", d3.event.clientY - 20 + "px");
console.log(d3.event.clientX);
})
.on("mouseout", function(d){
d3.select("#tooltip").remove();
})
.on("click", function(d){
window.location = "/a/counties/" + d.properties.NAME;
});
// Detect orientation of screen and scale map accordingly.
var bounds = d3.geo.path().bounds(json);
// Chooses a mercator projection, sticks it roughly in the center of the screen,
// sets the center of Pennsylvania, scales it up based on bounds of map
projection = d3.geo.mercator().translate([ w / 2.2, h / 1.8]).center([-77.995133, 40.696298]).scale( 800 * w / (bounds[1][0] - bounds[0][0]) );
// Apply transformation
county.attr("d", d3.geo.path().projection(projection));
});
});
}
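// Draws the same Pennsylvania county map but simply marks the passed county with the "selected" class,
// for use as a small locator map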
function drawLocatorMap( id, county ){
map = d3.select("#" + id);
w = $(map.node()).width();
h = $(map.node()).height();
d3.json("js/min.pennsylvania.json", function(error, json) {
// Join shapefile data
county = d3.select("#map")
.selectAll("path")
.data(json.features)
.enter().append("path")
.attr("class", function(d){
var output = "county";
if(d.properties.NAME == county) output += " selected";
return output;
})
.on("mousemove", function(d){
d3.select("#tooltip").remove();
d3.select("body").append("div")
.attr("id", "tooltip")
.attr("class", "table")
.html("<h4>" + d.properties.NAME + "</h4>")
.style("left", d3.event.clientX + 10 + "px")
.style("top", d3.event.clientY - 20 + "px");
console.log(d3.event.clientX);
})
.on("mouseout", function(d){
d3.select("#tooltip").remove();
})
.on("click", function(d){
window.location = "/a/counties/" + d.properties.NAME;
});
// Detect orientation of screen and scale map accordingly.
var bounds = d3.geo.path().bounds(json);
// Chooses a mercator projection, sticks it roughly in the center of the screen,
// sets the center of Pennsylvania, scales it up based on bounds of map
projection = d3.geo.mercator().translate([ w / 2.2, h / 1.8]).center([-77.995133, 40.696298]).scale( 800 * w / (bounds[1][0] - bounds[0][0]) );
// Apply transformation
county.attr("d", d3.geo.path().projection(projection));
});
} | js/scripts.js | $(document).ready(function() {
/* var mapOptions = {
zoom: 8,
center: new google.maps.LatLng(40.4450813, -80.00877459999998),
disableDefaultUI: true
};
map = new google.maps.Map(document.getElementById('map_canvas'),
mapOptions);*/
//parse url
var pathname = window.location.pathname; //get current url
var split = pathname.split("/"); //treat it like a string, break it up where the /'s are
//console.log(split[2]);
//case statement
switch(split[2]) { //the second item in the array will be the type of page this will be
case "candidates":
$('#bycandidate').addClass('active'); //make the dropdown menu active on the correct item
var candName = decodeURIComponent(split[3]); //grab the candidate name from the url
console.log(toTitleCase(candName));
break;
case "counties":
$('#bycounty').addClass('active');
var countyName = split[3];
console.log("one more tiiiiiiiiiime");
console.log("we're gonna celebrate");
break;
case "contributors":
$('#bycontributor').addClass('active');
var contribID = split[3];
$.getJSON("/api/contributors/" + contribID, function(data){
//var need = "{need variable}";
//intro row
var container = $("#main");
introRow = $("<div></div>").appendTo(container);
introRow.addClass("row intro-row");
var introLabel = $("<label>CONTRIBUTOR</label>");
introLabel.appendTo(introRow);
var jumbotron = $("<div></div>").appendTo(introRow);
jumbotron.addClass("jumbotron");
var contribName = data.results[0].contributor;
var headerAmt = $("<h1>" + contribName + "</h1><div class='thin-divider'></div>");
headerAmt.appendTo(jumbotron);
var contribJob = data.results[0].occupation;
var contribEmp = data.results[0].empName;
//console.log(contribJob.length);
var contribTitle = "";
if((contribJob.length > 0) && (contribEmp.length > 0)){
contribTitle = "<small>" + contribJob + ", " + contribEmp + "</small>";
}else if ((contribJob.length > 0) && (contribEmp.length == 0)){
contribTitle = "<small>" + contribJob + "</small>";
}else if ((contribJob.length == 0) && (contribEmp.length > 0)){
contribTitle = ", <small>" + contribEmp + "</small>";
}else {
contribTitle = "";
};
$(contribTitle).appendTo(headerAmt);
var thinDivider = $("<div class='thin-divider'></div>");
thinDivider.appendTo(jumbotron);
// top totals row
var topTotals = $("<div class='row top-totals'></div>").appendTo(container);
var topTotalsLeft = $("<div class='col-lg-4 col-md-4 col-sm-4 col-xs-12 block first'></div>").appendTo(topTotals);
// Location block
var locationCity = data.results[0].city;
var locationCounty = data.results[0].county;
var topTotalsLeftLocationBlock = $("<div class='row'> \
<div class='col-lg-12 col-md-12 col-sm-12'> \
<label>LOCATION</label><h3>" + locationCity + "</h3> \
<p><a href='/a/counties/" + locationCounty + "'>" + locationCounty + " County</a></p> \
</div> \
</div> \
<div class='thin-divider'></div>"
).appendTo(topTotalsLeft);
thinDivider.appendTo(topTotalsLeft);
//Top contributed block
var topTotalsContrib = $("<div class='row'></div>").appendTo(topTotalsLeft);
var topTotalsContribCol12 = $("<div class='col-lg-12 col-md-12 col-sm-12'></div>").appendTo(topTotalsContrib);
var contributionTotal = data.results[0].amount;
var topTotalsContribLabel = $("<label>TOTAL CONTRIBUTED</label><h3>" + toDollars(contributionTotal) + "</h3>").appendTo(topTotalsContribCol12);
//Candidate breakdown table
var topTotalsContribCandidate = $("<div class='row'></div>").appendTo(topTotalsContribCol12);
var topTotalsContribCandidateCol12 = $("<div class='col-lg-12 col-md-12 col-sm-12'></div>").appendTo(topTotalsContribCandidate);
var topTotalsContribCandidateTable = $("<table class='horizontal-bar-graph'></table>").appendTo(topTotalsContribCandidateCol12);
//Corbett_Wolf variables and logic
var wolfContributionAmt = "";
var wolfContributionNum = "";
var results = data.results[0].beneficiaries;
$.each(results, function(i, item){
if(results[i].filerid == "20130153"){
wolfContributionAmt = results[i].amount;
wolfContributionNum = results[i].contributions;
}else {
wolfContributionAmt = "0";
wolfContributionNum = "0";
}
});
var corbettContributionAmt = "";
var corbettContributionNum = "";
var results = data.results[0].beneficiaries;
$.each(results, function(i, item){
if(results[i].filerid == "2009216"){
corbettContributionAmt = results[i].amount;
corbettContributionNum = results[i].contributions;
}else {
corbettContributionAmt = "0";
corbettContributionNum = "0";
}
});
var wolfBarWidth = "";
var corbettBarWidth = "";
// scale the shorter bar as a percentage of the longer one
if(wolfContributionAmt > corbettContributionAmt){
wolfBarWidth = "100";
corbettBarWidth = (corbettContributionAmt / wolfContributionAmt) * 100;
}else if(corbettContributionAmt > wolfContributionAmt){
corbettBarWidth = "100";
wolfBarWidth = (wolfContributionAmt / corbettContributionAmt) * 100;
} else{
corbettBarWidth = "100";
wolfBarWidth = "100";
}
//Corbett row --> need to make graphic length respect amt donated
var topTotalsCorbettRow = $("<tr><td><strong>Corbett</strong></td><td><div class='bar republican' style='width:" + corbettBarWidth +"%; color:#000000;'></div><span style='overflow:visible;'>" + corbettContributionAmt + " (" + corbettContributionNum + " contributions)" + "</span></td></tr>").appendTo(topTotalsContribCandidateTable);
//Wolf row --> need to make graphic length respect amt donated
var topTotalsWolfRow = $("<tr><td><strong>Wolf</strong></td><td><div class='bar democrat' style='width:" + wolfBarWidth + "%; color:#000000;'></div><span style='overflow:visible;'>" + wolfContributionAmt + " (" + wolfContributionNum + " contributions)" + "</span></td></tr>").appendTo(topTotalsContribCandidateTable);
//Overtime
var topTotalsOvertime = $("<div class='col-lg-8 col-md-8 col-sm-8 col-xs-12 block last'>").appendTo(topTotals);
var topTotalsOvertimeGraph = $("<h3>Contributions over time</h3><div id='timechart' style='width:100%; height:400px'></div>").appendTo(topTotalsOvertime);
makeTimeChart("timechart", "contributors", contribID, "2013-01-01", "2014-11-01");
thinDivider.appendTo(container);
container.append(thinDivider);
container.append('<div class="row tabular"> \
<div class="col-lg-12 col-md-12 col-sm-12"> \
<h3>All donations by ' + contribName +'</h3> \
<form class="form-inline pull-right"> \
<input type="search" class="form-control" placeholder="Search"> \
<button type="submit" class="btn btn-default">Submit</button> \
</form> \
<table class="table table-hover sortable"> \
<thead> \
<tr> \
<th>Date</th>\
<th>Candidate/PAC</th> \
<th>Amount</th> \
</tr> \
</thead> \
<tbody></tbody> \
</table> \
</div> \
</div> ');
//get contributor data
$.getJSON("/api/contributions/contributors/" + contribID, function(data){
$.each(data.results, function(i, result){
$('.tabular tbody').append("<tr><td><a href='/a/contributions/" + result.id + "'>" + result.date + "</a></td><td>" + result.name + "</td><td>" + toDollars(result.contribution) + "</td></tr>");
});
});
});
break;
case "contributions":
$('#bycontributor').addClass('active');
var contributionID = split[3];
$.getJSON("/api/contributions/" + contributionID, function(data){
var contribName = data.results[0].contributor;
var contribID = data.results[0].contributorid;
var contributionAmt = "$" + data.results[0].contribution;
var conDateRaw = data.results[0].date;
var d = new Date(conDateRaw);
var con_date = d.getDate();
var con_month = d.getMonth() + 1; // getMonth() is zero-based
var con_year = d.getFullYear();
var contributionDate = con_month + "/" + con_date + "/" + con_year;
var filerID = data.results[0].filerid;
$.getJSON("/api/contributors/" + contribID, function(data){
var locationCity = data.results[0].city;
var locationZip = data.results[0].zip;
locationZip = locationZip.substring(0,5);
$.getJSON("/api/candidates/", function(data){
var candidateName = "";
var results = data.results;
$.each(results, function(i, item){
if(results[i].filerid == filerID){
candidateName = results[i].name;
}
});
//intro row
var container = $("#main");
introRow = $("<div></div>").appendTo(container);
introRow.addClass("row intro-row");
var introLabel = $("<label>CONTRIBUTION</label>");
introLabel.appendTo(introRow);
var jumbotron = $("<div></div>").appendTo(introRow);
jumbotron.addClass("jumbotron");
var headerAmt = $("<h1>" + contributionAmt + "</h1>");
headerAmt.appendTo(jumbotron);
var thinDivider = $("<div class='thin-divider'></div>");
thinDivider.appendTo(jumbotron);
//donation info row
contributionRow = $("<div></div>").appendTo(container);
contributionRow.addClass("row");
var colmd5 = "col-md-5";
var collg7 = "col-lg-7";
var colmd7 = "col-md-7";
var colsm7 = "col-sm-7";
contributionColumn = $("<div></div>").appendTo(contributionRow);
contributionColumn.addClass(colmd5);
var h3 = "<h3></h3>";
var contribItem = $(h3).appendTo(contributionColumn);
contribItem.addClass("donor-item");
var contribLabel = $("<strong>Contributor: </strong>").appendTo(contribItem);
var contribLink = $("<a>" + contribName + "</a>").appendTo(contribItem);
contribLink.attr("href", "/a/contributors/" + contribID);
var candidateItem = $(h3).appendTo(contributionColumn);
candidateItem.addClass("candidate-item");
var candidateLabel = $("<strong>Candidate: </strong>").appendTo(candidateItem);
var candidateLink = $("<a>" + candidateName + "</a>").appendTo(candidateItem);
candidateLink.attr("href", "/a/candidates/" + candidateName);
var dateItem = $(h3).appendTo(contributionColumn);
dateItem.addClass("date-item");
var dateLabel = $("<strong>Date: </strong>").appendTo(dateItem);
var dateData = $("<span>" + contributionDate + "</span>").appendTo(dateItem);
var locationItem = $(h3).appendTo(contributionColumn);
locationItem.addClass("location-item");
var locationLabel = $("<strong>Location: </strong>").appendTo(locationItem);
var locationData = $("<span>" + locationCity + ", " + locationZip + "</span>").appendTo(locationItem);
function initialize() {
var mapOptions = {
center: { lat: -34.397, lng: 150.644},
zoom: 8
};
var map = new google.maps.Map(document.getElementById('map-canvas'), mapOptions);
};
google.maps.event.addDomListener(window, 'load', initialize);
mapColumn = $("<div class='col-lg-7 col-md-7 col-sm-7' id='map-canvas' style='height:100%;'></div>").appendTo(contributionRow);
// The Google Map renders itself into #map-canvas once the 'load' handler above runs, so nothing else needs appending here
//var countyMap = $("<img class='sm_county_map' src='/img/allegheny-map.png' alt='Allegheny County locator map'>").appendTo(mapColumn);
});
});
});
break;
default:
//default //code block
}
});
function toTitleCase(str)
{
return str.replace(/\w\S*/g, function(txt){return txt.charAt(0).toUpperCase() + txt.substr(1).toLowerCase();});
}
// Converts XXXXXX.XXXXX to $XXX,XXX.XX
function toDollars(x){
return "$" + numberWithCommas( Math.floor(x * 100) / 100 );
}
// Converts XXXXXXXX to XX,XXX,XXX
function numberWithCommas(x){
return x.toString().replace(/\B(?=(\d{3})+(?!\d))/g, ",");
}
function makeTimeChart(id, endpoint, target, startDate, endDate){
$.getJSON("api/months/" + endpoint + "/" + target + "?startDate=" + startDate + "&endDate=" + endDate, function(json){
data = [];
$.each(json.results, function(i, date){
data.push( [Date.parse(date["year"] + "-" + date["month"] + "-" + "01 05:01:00"), date["total"]] );
$.plot("#" + id, [{
data: data,
color:"seagreen"
}], {
series: {
lines:{
lineWidth: 6,
show:true
},
points:{
show:true
}
},
xaxis: {
mode: "time",
min: data[0][0]
},
yaxis: {
tickFormatter: function(val, axis){
return "$" + numberWithCommas(val);
},
min: 0
},
grid: {
borderWidth: {
top: 0,
left: 1,
right: 0,
bottom: 1
},
hoverable: true
},
markings:0
});
$("#" + id).bind("plothover", function(event, pos, item){
if( item ) {
if( $("#tooltip").length == 0 ){
$("<div id='tooltip'></div>").appendTo( $("body") )
.css({top: item.clientY+5, left: item.clientX+5});
$("#tooltip").html("<div class='date'>" + ( new Date(item.datapoint[0]).getMonth() + 1 ) + "/" + new Date(item.datapoint[0]).getFullYear() + "</div><div class='text'>" + toDollars(item.datapoint[1]) + "</div>");
console.log(item.datapoint[0]);
}
else {
$("#tooltip").css({top: item.clientY-20, left: item.clientX+10});
}
x = item.datapoint[0].toFixed(2);
y = item.datapoint[1].toFixed(2);
}
else $("#tooltip").remove();
});
});
});
}
function makePieChart(id, target, target_state){
$.getJSON("api/states/candidates/" + target, function(json) {
data = [];
$.each(json.results, function(i, state){
if( state.state == target_state ) data[1] = { label: "In-state", data: state.amount};
else if ( i == 0 ) data.push({ label: "Out of state", data: parseFloat(state.amount)})
else data[0].data += parseFloat(state.amount);
});
$.plot('#' + id, data, {
series: {
pie: {
show: true,
hoverable: true
}
},
grid: {
hoverable: true
}
});
$("#" + id).bind("plothover", function(e, pos, item){
if( item ) {
if( $("#tooltip").length == 0 ){
$("<div id='tooltip'></div>").appendTo( $("body") );
}
$("#tooltip").css({top: pos.pageY-20, left: pos.pageX+10});
$("#tooltip").html("<div class='text'>" + Math.floor(item.datapoint[0]) + "%</div>");
}
else $("#tooltip").remove();
});
});
}
function drawCandidateMap( id ){
map = d3.select("#" + id);
w = $(map.node()).width();
h = $(map.node()).height();
var counties = [];
$.getJSON("api/counties", function(countiesJSON){
$.each(countiesJSON.results, function(i, county){
county.beneficiaries.sort(function(a,b){
return +b.amount - +a.amount;
});
counties[county.county] = { county: county.county, winner: county.beneficiaries[0].party, candidates: county.beneficiaries };
});
d3.json("js/min.pennsylvania.json", function(error, json) {
// Join shapefile data
county = d3.select("#map")
.selectAll("path")
.data(json.features)
.enter().append("path")
.attr("class", function(d){
var output = "county";
if(counties[d.properties.NAME]){
output += " " + counties[d.properties.NAME].winner;
}
return output;
})
.on("mousemove", function(d){
d3.select("#tooltip").remove();
d3.select("body").append("div")
.attr("id", "tooltip")
.attr("class", "table")
.html(function(){
output = "<h4>" + d.properties.NAME + "</h4><table><tbody>";
if( counties[d.properties.NAME] ){
$.each( counties[d.properties.NAME].candidates, function(i, candidate){
output += "<tr><td>" + candidate.name + "</td><td><strong>" + toDollars(candidate.amount) + "</td></tr>";
});
}
return output + "</tbody></table>";
})
.style("left", d3.event.clientX + 10 + "px")
.style("top", d3.event.clientY - 20 + "px");
console.log(d3.event.clientX);
})
.on("mouseout", function(d){
d3.select("#tooltip").remove();
})
.on("click", function(d){
window.location = "/a/counties/" + d.properties.NAME;
});
// Detect orientation of screen and scale map accordingly.
var bounds = d3.geo.path().bounds(json);
// Chooses a mercator projection, sticks it roughly in the center of the screen,
// sets the center of Pennsylvania, scales it up based on bounds of map
projection = d3.geo.mercator().translate([ w / 2.2, h / 1.8]).center([-77.995133, 40.696298]).scale( 800 * w / (bounds[1][0] - bounds[0][0]) );
// Apply transformation
county.attr("d", d3.geo.path().projection(projection));
});
});
}
function drawLocatorMap( id, county ){
map = d3.select("#" + id);
w = $(map.node()).width();
h = $(map.node()).height();
d3.json("js/min.pennsylvania.json", function(error, json) {
// Join shapefile data
county = d3.select("#map")
.selectAll("path")
.data(json.features)
.enter().append("path")
.attr("class", function(d){
var output = "county";
if(d.properties.NAME == county) output += " selected";
return output;
})
.on("mousemove", function(d){
d3.select("#tooltip").remove();
d3.select("body").append("div")
.attr("id", "tooltip")
.attr("class", "table")
.html("<h4>" + d.properties.NAME + "</h4>")
.style("left", d3.event.clientX + 10 + "px")
.style("top", d3.event.clientY - 20 + "px");
console.log(d3.event.clientX);
})
.on("mouseout", function(d){
d3.select("#tooltip").remove();
})
.on("click", function(d){
window.location = "/a/counties/" + d.properties.NAME;
});
// Detect orientation of screen and scale map accordingly.
var bounds = d3.geo.path().bounds(json);
// Chooses a mercator projection, sticks it roughly in the center of the screen,
// sets the center of Pennsylvania, scales it up based on bounds of map
projection = d3.geo.mercator().translate([ w / 2.2, h / 1.8]).center([-77.995133, 40.696298]).scale( 800 * w / (bounds[1][0] - bounds[0][0]) );
// Apply transformation
county.attr("d", d3.geo.path().projection(projection));
});
});
} | fixed bug
| js/scripts.js | fixed bug | <ide><path>s/scripts.js
<ide> h = $(map.node()).height();
<ide>
<ide> var counties = [];
<del> $.getJSON("api/counties", function(countiesJSON){
<del> $.each(countiesJSON.results, function(i, county){
<del> county.beneficiaries.sort(function(a,b){
<del> return +b.amount - +a.amount;
<add> $.getJSON("api/counties", function(countiesJSON){
<add> $.each(countiesJSON.results, function(i, county){
<add> county.beneficiaries.sort(function(a,b){
<add> return +b.amount - +a.amount;
<add> });
<add> counties[county.county] = { county: county.county, winner: county.beneficiaries[0].party, candidates: county.beneficiaries };
<add> });
<add> d3.json("js/min.pennsylvania.json", function(error, json) {
<add> // Join shapefile data
<add> county = d3.select("#map")
<add> .selectAll("path")
<add> .data(json.features)
<add> .enter().append("path")
<add> .attr("class", function(d){
<add> var output = "county";
<add> if(counties[d.properties.NAME]){
<add> output += " " + counties[d.properties.NAME].winner;
<add> }
<add> return output;
<add> })
<add> .on("mousemove", function(d){
<add> d3.select("#tooltip").remove();
<add> d3.select("body").append("div")
<add> .attr("id", "tooltip")
<add> .attr("class", "table")
<add> .html(function(){
<add> output = "<h4>" + d.properties.NAME + "</h4><table><tbody>";
<add> if( counties[d.properties.NAME] ){
<add> $.each( counties[d.properties.NAME].candidates, function(i, candidate){
<add> output += "<tr><td>" + candidate.name + "</td><td><strong>" + toDollars(candidate.amount) + "</td></tr>";
<add> });
<add> }
<add> return output + "</tbody></table>";
<add> })
<add> .style("left", d3.event.clientX + 10 + "px")
<add> .style("top", d3.event.clientY - 20 + "px");
<add> console.log(d3.event.clientX);
<add> })
<add> .on("mouseout", function(d){
<add> d3.select("#tooltip").remove();
<add> })
<add> .on("click", function(d){
<add>
<add> window.location = "/a/counties/" + d.properties.NAME;
<ide> });
<del> counties[county.county] = { county: county.county, winner: county.beneficiaries[0].party, candidates: county.beneficiaries };
<del> });
<del> d3.json("js/min.pennsylvania.json", function(error, json) {
<del> // Join shapefile data
<del> county = d3.select("#map")
<del> .selectAll("path")
<del> .data(json.features)
<del> .enter().append("path")
<del> .attr("class", function(d){
<del> var output = "county";
<del> if(counties[d.properties.NAME]){
<del> output += " " + counties[d.properties.NAME].winner;
<del> }
<del> return output;
<del> })
<del> .on("mousemove", function(d){
<del> d3.select("#tooltip").remove();
<del> d3.select("body").append("div")
<del> .attr("id", "tooltip")
<del> .attr("class", "table")
<del> .html(function(){
<del> output = "<h4>" + d.properties.NAME + "</h4><table><tbody>";
<del> if( counties[d.properties.NAME] ){
<del> $.each( counties[d.properties.NAME].candidates, function(i, candidate){
<del> output += "<tr><td>" + candidate.name + "</td><td><strong>" + toDollars(candidate.amount) + "</td></tr>";
<del> });
<del> }
<del> return output + "</tbody></table>";
<del> })
<del> .style("left", d3.event.clientX + 10 + "px")
<del> .style("top", d3.event.clientY - 20 + "px");
<del> console.log(d3.event.clientX);
<del> })
<del> .on("mouseout", function(d){
<del> d3.select("#tooltip").remove();
<del> })
<del> .on("click", function(d){
<del>
<del> window.location = "/a/counties/" + d.properties.NAME;
<del> });
<del>
<del> // Detetct orientation of screen and scale map accordingly.
<del> var bounds = d3.geo.path().bounds(json);
<del>
<del> // Chooses a mercator projection, sticks it roughly in the center of the screen,
<del> // sets the center of Pennsylvania, scales it up based on bounds of map
<del> projection = d3.geo.mercator().translate([ w / 2.2, h / 1.8]).center([-77.995133, 40.696298]).scale( 800 * w / (bounds[1][0] - bounds[0][0]) );
<del>
<del> // Apply transformation
<del> county.attr("d", d3.geo.path().projection(projection));
<del> });
<add>
<add> // Detetct orientation of screen and scale map accordingly.
<add> var bounds = d3.geo.path().bounds(json);
<add>
<add> // Chooses a mercator projection, sticks it roughly in the center of the screen,
<add> // sets the center of Pennsylvania, scales it up based on bounds of map
<add> projection = d3.geo.mercator().translate([ w / 2.2, h / 1.8]).center([-77.995133, 40.696298]).scale( 800 * w / (bounds[1][0] - bounds[0][0]) );
<add>
<add> // Apply transformation
<add> county.attr("d", d3.geo.path().projection(projection));
<ide> });
<del>}
<del>
<del>
<add> });
<add>}
<ide>
<ide> function drawLocatorMap( id, county ){
<ide> map = d3.select("#" + id);
<ide> w = $(map.node()).width();
<ide> h = $(map.node()).height();
<ide>
<del> d3.json("js/min.pennsylvania.json", function(error, json) {
<del> // Join shapefile data
<del> county = d3.select("#map")
<del> .selectAll("path")
<del> .data(json.features)
<del> .enter().append("path")
<del> .attr("class", function(d){
<del> var output = "county";
<del> if(d.properties.NAME == county) output += " selected";
<del> return output;
<del> })
<del> .on("mousemove", function(d){
<del> d3.select("#tooltip").remove();
<del> d3.select("body").append("div")
<del> .attr("id", "tooltip")
<del> .attr("class", "table")
<del> .html("<h4>" + d.properties.NAME + "</h4>")
<del> .style("left", d3.event.clientX + 10 + "px")
<del> .style("top", d3.event.clientY - 20 + "px");
<del> console.log(d3.event.clientX);
<del> })
<del> .on("mouseout", function(d){
<del> d3.select("#tooltip").remove();
<del> })
<del> .on("click", function(d){
<del> window.location = "/a/counties/" + d.properties.NAME;
<del> });
<del>
<del> // Detetct orientation of screen and scale map accordingly.
<del> var bounds = d3.geo.path().bounds(json);
<del>
<del> // Chooses a mercator projection, sticks it roughly in the center of the screen,
<del> // sets the center of Pennsylvania, scales it up based on bounds of map
<del> projection = d3.geo.mercator().translate([ w / 2.2, h / 1.8]).center([-77.995133, 40.696298]).scale( 800 * w / (bounds[1][0] - bounds[0][0]) );
<del>
<del> // Apply transformation
<del> county.attr("d", d3.geo.path().projection(projection));
<add> d3.json("js/min.pennsylvania.json", function(error, json) {
<add> // Join shapefile data
<add> county = d3.select("#map")
<add> .selectAll("path")
<add> .data(json.features)
<add> .enter().append("path")
<add> .attr("class", function(d){
<add> var output = "county";
<add> if(d.properties.NAME == county) output += " selected";
<add> return output;
<add> })
<add> .on("mousemove", function(d){
<add> d3.select("#tooltip").remove();
<add> d3.select("body").append("div")
<add> .attr("id", "tooltip")
<add> .attr("class", "table")
<add> .html("<h4>" + d.properties.NAME + "</h4>")
<add> .style("left", d3.event.clientX + 10 + "px")
<add> .style("top", d3.event.clientY - 20 + "px");
<add> console.log(d3.event.clientX);
<add> })
<add> .on("mouseout", function(d){
<add> d3.select("#tooltip").remove();
<add> })
<add> .on("click", function(d){
<add> window.location = "/a/counties/" + d.properties.NAME;
<ide> });
<del> });
<del>}
<add>
<add> // Detetct orientation of screen and scale map accordingly.
<add> var bounds = d3.geo.path().bounds(json);
<add>
<add> // Chooses a mercator projection, sticks it roughly in the center of the screen,
<add> // sets the center of Pennsylvania, scales it up based on bounds of map
<add> projection = d3.geo.mercator().translate([ w / 2.2, h / 1.8]).center([-77.995133, 40.696298]).scale( 800 * w / (bounds[1][0] - bounds[0][0]) );
<add>
<add> // Apply transformation
<add> county.attr("d", d3.geo.path().projection(projection));
<add> });
<add>} |
|
JavaScript | mit | 9a130f34a7a8a70d334909ed47a37431ff17a91e | 0 | zubairq/gosharedata,zubairq/yazz,zubairq/gosharedata,zubairq/yazz | function(args) {
/*
is_app(true)
control_type("VB")
display_name("3d control")
description("This will return the 3d container control")
base_component_id("container_3d")
load_once_from_file(true)
visibility("PRIVATE")
read_only(true)
properties(
[
{
id: "text",
name: "Text",
type: "String"
}
,
{
id: "lastKeyPressed",
name: "Last key pressed",
default: "",
type: "String"
}
,
{
id: "lastKeyDown",
name: "Last key down",
default: "",
type: "String"
}
,
{
id: "background_color",
name: "Background color",
type: "String"
}
,
{
id: "is_container",
name: "Is Container?",
type: "Boolean",
default: true,
hidden: true
}
,
{
id: "hide_children",
name: "Hide Children?",
type: "Boolean",
default: true,
hidden: true
}
,
{
id: "select_parent_when_child_added",
name: "Select Parent Only?",
type: "Boolean",
default: true,
hidden: true
}
,
{
id: "has_details_ui",
name: "Has details UI?",
type: "Boolean",
default: true,
hidden: true
}
,
{
id: "width",
name: "Width",
default: 300,
type: "Number"
}
,
{
id: "height",
name: "Height",
default: 300,
type: "Number"
}
,
{
id: "x",
name: "Camera X",
type: "Number",
default: 0
}
,
{
id: "y",
name: "Camera Y",
type: "Number",
default: 1.6
}
,
{
id: "z",
name: "Camera Z",
type: "Number",
default: 1
}
,
{
id: "cameraRight",
snippet: `cameraRight(1,[[duration in ms]],[[bounce: true/false]])`,
name: "Camera Right()",
type: "Action"
}
,
{
id: "getVRMode",
snippet: `getVRMode()`,
name: "Get VR Mode()",
type: "Action"
}
,
{
id: "cameraUp",
snippet: `cameraUp(1,[[duration in ms]],[[bounce: true/false]])`,
name: "Camera Up()",
type: "Action"
}
,
{
id: "cameraLeft",
snippet: `cameraLeft(1,[[duration in ms]],[[bounce: true/false]])`,
name: "Camera Left()",
type: "Action"
}
,
{
id: "cameraDown",
snippet: `cameraDown(1,[[duration in ms]],[[bounce: true/false]])`,
name: "Camera Down()",
type: "Action"
}
,
{
id: "cameraBack",
snippet: `cameraBack(1,[[duration in ms]],[[bounce: true/false]])`,
name: "Camera Back()",
type: "Action"
}
,
{
id: "cameraForward",
snippet: `cameraForward(1,[[duration in ms]],[[bounce: true/false]])`,
name: "Camera Forward()",
type: "Action"
}
,
{
id: "cameraTo",
snippet: `cameraTo({x: , y:, z:})`,
name: "Camera To()",
type: "Action"
}
,
{
id: "enterVR",
snippet: `enterVR()`,
name: "Enter VR()",
type: "Action"
}
,
{
id: "exitVR",
snippet: `exitVR()`,
name: "Exit VR()",
type: "Action"
}
,
{
id: "keypress_event",
name: "Key Press Event",
type: "Event"
}
,
{
id: "keydown_event",
name: "Key Down Event",
type: "Event"
}
,
{
id: "backspace_event",
name: "Backspace Event",
type: "Event"
}
,
{
id: "delete_event",
name: "Delete Event",
type: "Event"
}
,
{
id: "enter_event",
name: "Enter Event",
type: "Event"
}
]
)//properties
logo_url("/driver_icons/threedee_item.png")
*/
Vue.component("container_3d",{
props: ["args","design_mode", "refresh", "children","delete_design_time_component","select_design_time_component"]
,
template:
`<div v-bind:style='"width:100%;overflow-y:auto;height:100%"'
v-bind:refresh='refresh'>
<div v-bind:style='"width:100%;height:40vh;overflow-y:auto;"'
v-bind:refresh='refresh'
v-if='design_mode == "detail_editor"'>
3D Scene Detail editor
<div v-bind:style='"border:1px solid gray; padding: 10px;display:flex;" + ((selected_index==index)?"background-color: lightgray;":"")'
v-bind:refresh='refresh'
v-on:click='$event.stopPropagation();selected_index=index;select_design_time_component(child_item.index_in_parent_array)'
v-for='(child_item,index) in children'>
<div v-if='child_item'
v-bind:refresh='refresh'>
<div v-bind:style='"display:inline-block;"'
v-if='isValidObject(child_item)'
v-bind:refresh='refresh'>{{child_item.name}}</div>
<div class='btn btn-danger'
v-bind:refresh='refresh'
v-if='child_item'
v-bind:style='"box-shadow: rgba(0, 0, 0, 0.2) 0px 4px 8px 0px, rgba(0, 0, 0, 0.19) 0px 6px 20px 0px;padding:0px; z-index: 21474836;opacity:1;" +
"width: 20px; height: 20px; line-height:20px;text-align: center;vertical-align: middle;margin-left: 20px;"'
v-on:click='$event.stopPropagation();delete_design_time_component(child_item.index_in_parent_array)'>
X
</div>
</div>
</div>
</div>
<div v-bind:style='"width:100%;height:40vh;overflow-y:auto;"'
v-bind:refresh='refresh'
v-if='design_mode == true'>
3D Scene - {{children.length}} items
</div>
<div id='3d_scene'
ref='3d_scene'
v-bind:style='"width:" + args.width + "; height: " + args.height + ";"'
v-if='design_mode == false'
v-bind:refresh='refresh'>
<a-scene v-bind:id='(design_mode?"design_scene":"scene")'
v-bind:ref='(design_mode?"design_scene":"scene")'
physics-world=""
v-if='design_mode == false'
physics="debug: false"
cursor="rayOrigin: mouse"
style='width: 80%; height: 80%;'
embedded
allowvr="yes"
v-bind:refresh='refresh'>
<a-entity geometry="primitive: box; depth: 50; height: 0.1; width: 50"
material="color: #2E3837"
v-bind:refresh='refresh'
static-body
physics-body="mass: 0; boundingBox: 50 0.1 50" position="0 0 -10">
</a-entity>
<a-entity v-bind:id='"camera_rig_3d"'
v-bind:position='args.x + " " + args.y + " " + args.z'>
<a-entity id="camera" camera look-controls>
</a-entity>
<a-entity id="laser"
v-if="headsetConnected"
laser-controls="hand: right"
raycaster="hand: right;model: true;"
line="opacity:1.0;">
</a-entity>
</a-entity>
<slot v-bind:refresh='refresh'>
</slot>
</a-scene>
</div>
</div>`
,
mounted: function() {
var mm = this
registerComponent(this)
if (!this.design_mode) {
//var scene = document.querySelector('a-scene');
//if (isValidObject(scene)) {
//scene.addEventListener('click', function () {
// Apply impulse;
//setTimeout(function () {
//var box = document.getElementById('left-box');
//var impulse = { x: 0, y: 10, z: 0 };
//var point = { x: 0.5, y: 0, z: 0 };
//box['physics-body'].applyImpulse(impulse, point);
//}, 25);
//});
//}
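// Poll every two seconds for a connected VR headset and for fullscreen state; keyboard input is only
// forwarded to the scene while the page is fullscreen (i.e. while in VR mode)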
appSetInterval(function(){
if (AFRAME.utils.device.checkHeadsetConnected()) {
mm.headsetConnected = true
} else {
mm.headsetConnected = false
}
if (document.fullscreen) {
mm.inVRMode = true
mm.keyboardEnabled = true
} else {
mm.inVRMode = false
mm.keyboardEnabled = false
}
},2000)
if (!isValidObject(window.vrKeyPressEventLisener)) {
window.vrKeyPressEventLisener = document.addEventListener('keypress', function(kevent) {
if(mm.keyboardEnabled) {
var keynum
if(window.event) { // IE
keynum = kevent.keyCode;
} else if(kevent.which){ // Netscape/Firefox/Opera
keynum = kevent.which;
}
mm.keyPressEventHandler(keynum)
}
});
}
if (!isValidObject(window.vrKeyDownEventLisener)) {
window.vrKeyDownEventLisener = document.addEventListener('keydown', function(kevent) {
if(mm.keyboardEnabled) {
var keynum
if(window.event) { // IE
keynum = kevent.keyCode;
} else if(kevent.which){ // Netscape/Firefox/Opera
keynum = kevent.which;
}
mm.keyDownEventHandler(keynum)
}
});
}
}
}
,
data: function() {
return {
selected_index: null,
headsetConnected: false,
keyboardEnabled: false,
inVRMode: false
}
}
,
methods: {
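// Camera helpers: the directional methods below (cameraRight/Left/Up/Down/Back/Forward) all delegate
// to cameraTo() with an offset relative to the current rig position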
cameraRight: async function(amount, duration, bounce) {
await this.cameraTo({
duration: isValidObject(duration)?duration:2000,
bounce: isValidObject(bounce)?bounce:false,
x: this.args.x + amount
})
}
,
cameraLeft: async function(amount, duration, bounce) {
await this.cameraTo({
duration: isValidObject(duration)?duration:2000,
bounce: isValidObject(bounce)?bounce:false,
x: this.args.x - amount
})
}
,
cameraUp: async function(amount, duration, bounce) {
await this.cameraTo({
duration: isValidObject(duration)?duration:2000,
bounce: isValidObject(bounce)?bounce:false,
y: this.args.y + amount
})
}
,
cameraDown: async function(amount, duration, bounce) {
await this.cameraTo({
duration: isValidObject(duration)?duration:2000,
bounce: isValidObject(bounce)?bounce:false,
y: this.args.y - amount
})
}
,
cameraBack: async function(amount, duration, bounce) {
await this.cameraTo({
duration: isValidObject(duration)?duration:2000,
bounce: isValidObject(bounce)?bounce:false,
z: this.args.z + amount
})
}
,
cameraForward: async function(amount, duration, bounce) {
await this.cameraTo({
duration: isValidObject(duration)?duration:2000,
bounce: isValidObject(bounce)?bounce:false,
z: this.args.z - amount
})
}
,
cameraTo: async function(opts) {
var mm = this
var dd = document.querySelector("#camera_rig_3d" )
var loop = "0"
var direction = "normal"
var duration = 2000
var bounce = false
var newX = this.args.x
var newY = this.args.y
var newZ = this.args.z
if (isValidObject(opts.x)) {
newX = opts.x
}
if (isValidObject(opts.y)) {
newY = opts.y
}
if (isValidObject(opts.z)) {
newZ = opts.z
}
var newPosition = newX + " " + newY + " " + newZ
if (isValidObject(opts.loop)) {
loop = opts.loop
}
if (isValidObject(opts.bounce) && (opts.bounce == true)) {
bounce = true
direction = "alternate"
loop = "1"
}
if (isValidObject(opts.duration)) {
duration = opts.duration
}
//
// we need Math.random here as otherwise the animation will not get triggered for
// repeated invocations
//
dd.setAttribute("animation",
`property: position; to: ${newPosition}; loop: ${loop}; dur: ${duration}; dir: ${direction}; rand: ${Math.random()} `
);
if (!bounce) {
setTimeout(function() {
mm.args.x = newX
mm.args.y = newY
mm.args.z = newZ
},(duration + 100))
}
return {}
}
,
enterVR: async function() {
var scene = document.querySelector('#scene');
scene.enterVR()
}
,
exitVR: async function() {
var scene = document.querySelector('#scene');
scene.exitVR()
}
,
getVRMode: async function() {
return this.inVRMode
}
,
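// Records the last key pressed and forwards it to the app's keypress_event handler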
keyPressEventHandler: function(keyCode) {
this.args.lastKeyPressed = String.fromCharCode(keyCode)
this.$emit('send', {
type: "subcomponent_event",
control_name: this.args.name,
sub_type: "keypressed",
args: {
key_pressed: String.fromCharCode(keyCode),
key_code: keyCode
},
code: this.args.keypress_event
})
}
,
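// Records the last key down and forwards it to the app's keydown_event handler; backspace, delete
// and enter additionally fire their own dedicated events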
keyDownEventHandler: function(keyCode) {
this.args.lastKeyDown = String.fromCharCode(keyCode)
this.$emit('send', {
type: "subcomponent_event",
control_name: this.args.name,
sub_type: "keydown",
args: {
key_down: String.fromCharCode(keyCode),
key_code: keyCode
},
code: this.args.keydown_event
})
if (keyCode == 8) {
this.$emit('send', {
type: "subcomponent_event",
control_name: this.args.name,
sub_type: "backspace",
code: this.args.backspace_event
})
}
if (keyCode == 46) {
this.$emit('send', {
type: "subcomponent_event",
control_name: this.args.name,
sub_type: "delete",
code: this.args.delete_event
})
}
if (keyCode == 13) {
this.$emit('send', {
type: "subcomponent_event",
control_name: this.args.name,
sub_type: "enter",
code: this.args.enter_event
})
}
}
}
})
}
| public/visifile_drivers/controls/container_3d.js | function(args) {
/*
is_app(true)
control_type("VB")
display_name("3d control")
description("This will return the 3d container control")
base_component_id("container_3d")
load_once_from_file(true)
visibility("PRIVATE")
read_only(true)
properties(
[
{
id: "text",
name: "Text",
type: "String"
}
,
{
id: "lastKeyPressed",
name: "Last key pressed",
default: "",
type: "String"
}
,
{
id: "lastKeyDown",
name: "Last key down",
default: "",
type: "String"
}
,
{
id: "background_color",
name: "Background color",
type: "String"
}
,
{
id: "is_container",
name: "Is Container?",
type: "Boolean",
default: true,
hidden: true
}
,
{
id: "hide_children",
name: "Hide Children?",
type: "Boolean",
default: true,
hidden: true
}
,
{
id: "select_parent_when_child_added",
name: "Select Parent Only?",
type: "Boolean",
default: true,
hidden: true
}
,
{
id: "has_details_ui",
name: "Has details UI?",
type: "Boolean",
default: true,
hidden: true
}
,
{
id: "width",
name: "Width",
default: 300,
type: "Number"
}
,
{
id: "height",
name: "Height",
default: 300,
type: "Number"
}
,
{
id: "x",
name: "Camera X",
type: "Number",
default: 0
}
,
{
id: "y",
name: "Camera Y",
type: "Number",
default: 1.6
}
,
{
id: "z",
name: "Camera Z",
type: "Number",
default: 1
}
,
{
id: "cameraRight",
snippet: `cameraRight(1,[[duration in ms]],[[bounce: true/false]])`,
name: "Camera Right()",
type: "Action"
}
,
{
id: "getVRMode",
snippet: `getVRMode()`,
name: "Get VR Mode()",
type: "Action"
}
,
{
id: "cameraUp",
snippet: `cameraUp(1,[[duration in ms]],[[bounce: true/false]])`,
name: "Camera Up()",
type: "Action"
}
,
{
id: "cameraLeft",
snippet: `cameraLeft(1,[[duration in ms]],[[bounce: true/false]])`,
name: "Camera Left()",
type: "Action"
}
,
{
id: "cameraDown",
snippet: `cameraDown(1,[[duration in ms]],[[bounce: true/false]])`,
name: "Camera Down()",
type: "Action"
}
,
{
id: "cameraBack",
snippet: `cameraBack(1,[[duration in ms]],[[bounce: true/false]])`,
name: "Camera Back()",
type: "Action"
}
,
{
id: "cameraForward",
snippet: `cameraForward(1,[[duration in ms]],[[bounce: true/false]])`,
name: "Camera Forward()",
type: "Action"
}
,
{
id: "cameraTo",
snippet: `cameraTo({x: , y:, z:})`,
name: "Camera To()",
type: "Action"
}
,
{
id: "enterVR",
snippet: `enterVR()`,
name: "Enter VR()",
type: "Action"
}
,
{
id: "exitVR",
snippet: `exitVR()`,
name: "Exit VR()",
type: "Action"
}
,
{
id: "keypress_event",
name: "Key Press Event",
type: "Event"
}
,
{
id: "keydown_event",
name: "Key Down Event",
type: "Event"
}
]
)//properties
logo_url("/driver_icons/threedee_item.png")
*/
Vue.component("container_3d",{
props: ["args","design_mode", "refresh", "children","delete_design_time_component","select_design_time_component"]
,
template:
`<div v-bind:style='"width:100%;overflow-y:auto;height:100%"'
v-bind:refresh='refresh'>
<div v-bind:style='"width:100%;height:40vh;overflow-y:auto;"'
v-bind:refresh='refresh'
v-if='design_mode == "detail_editor"'>
3D Scene Detail editor
<div v-bind:style='"border:1px solid gray; padding: 10px;display:flex;" + ((selected_index==index)?"background-color: lightgray;":"")'
v-bind:refresh='refresh'
v-on:click='$event.stopPropagation();selected_index=index;select_design_time_component(child_item.index_in_parent_array)'
v-for='(child_item,index) in children'>
<div v-if='child_item'
v-bind:refresh='refresh'>
<div v-bind:style='"display:inline-block;"'
v-if='isValidObject(child_item)'
v-bind:refresh='refresh'>{{child_item.name}}</div>
<div class='btn btn-danger'
v-bind:refresh='refresh'
v-if='child_item'
v-bind:style='"box-shadow: rgba(0, 0, 0, 0.2) 0px 4px 8px 0px, rgba(0, 0, 0, 0.19) 0px 6px 20px 0px;padding:0px; z-index: 21474836;opacity:1;" +
"width: 20px; height: 20px; line-height:20px;text-align: center;vertical-align: middle;margin-left: 20px;"'
v-on:click='$event.stopPropagation();delete_design_time_component(child_item.index_in_parent_array)'>
X
</div>
</div>
</div>
</div>
<div v-bind:style='"width:100%;height:40vh;overflow-y:auto;"'
v-bind:refresh='refresh'
v-if='design_mode == true'>
3D Scene - {{children.length}} items
</div>
<div id='3d_scene'
ref='3d_scene'
v-bind:style='"width:" + args.width + "; height: " + args.height + ";"'
v-if='design_mode == false'
v-bind:refresh='refresh'>
<a-scene v-bind:id='(design_mode?"design_scene":"scene")'
v-bind:ref='(design_mode?"design_scene":"scene")'
physics-world=""
v-if='design_mode == false'
physics="debug: false"
cursor="rayOrigin: mouse"
style='width: 80%; height: 80%;'
embedded
allowvr="yes"
v-bind:refresh='refresh'>
<a-entity geometry="primitive: box; depth: 50; height: 0.1; width: 50"
material="color: #2E3837"
v-bind:refresh='refresh'
static-body
physics-body="mass: 0; boundingBox: 50 0.1 50" position="0 0 -10">
</a-entity>
<a-entity v-bind:id='"camera_rig_3d"'
v-bind:position='args.x + " " + args.y + " " + args.z'>
<a-entity id="camera" camera look-controls>
</a-entity>
<a-entity id="laser"
v-if="headsetConnected"
laser-controls="hand: right"
raycaster="hand: right;model: true;"
line="opacity:1.0;">
</a-entity>
</a-entity>
<slot v-bind:refresh='refresh'>
</slot>
</a-scene>
</div>
</div>`
,
mounted: function() {
var mm = this
registerComponent(this)
if (!this.design_mode) {
//var scene = document.querySelector('a-scene');
//if (isValidObject(scene)) {
//scene.addEventListener('click', function () {
// Apply impulse;
//setTimeout(function () {
//var box = document.getElementById('left-box');
//var impulse = { x: 0, y: 10, z: 0 };
//var point = { x: 0.5, y: 0, z: 0 };
//box['physics-body'].applyImpulse(impulse, point);
//}, 25);
//});
//}
appSetInterval(function(){
if (AFRAME.utils.device.checkHeadsetConnected()) {
mm.headsetConnected = true
} else {
mm.headsetConnected = false
}
if (document.fullscreen) {
mm.inVRMode = true
mm.keyboardEnabled = true
} else {
mm.inVRMode = false
mm.keyboardEnabled = false
}
},2000)
if (!isValidObject(window.vrKeyPressEventLisener)) {
window.vrKeyPressEventLisener = document.addEventListener('keypress', function(kevent) {
if(mm.keyboardEnabled) {
var keynum
if(window.event) { // IE
keynum = kevent.keyCode;
} else if(kevent.which){ // Netscape/Firefox/Opera
keynum = kevent.which;
}
mm.keyPressEventHandler(keynum)
}
});
}
if (!isValidObject(window.vrKeyDownEventLisener)) {
window.vrKeyDownEventLisener = document.addEventListener('keydown', function(kevent) {
if(mm.keyboardEnabled) {
var keynum
if(window.event) { // IE
keynum = kevent.keyCode;
} else if(kevent.which){ // Netscape/Firefox/Opera
keynum = kevent.which;
}
mm.keyDownEventHandler(keynum)
}
});
}
}
}
,
data: function() {
return {
selected_index: null,
headsetConnected: false,
keyboardEnabled: false,
inVRMode: false
}
}
,
methods: {
cameraRight: async function(amount, duration, bounce) {
await this.cameraTo({
duration: isValidObject(duration)?duration:2000,
bounce: isValidObject(bounce)?bounce:false,
x: this.args.x + amount
})
}
,
cameraLeft: async function(amount, duration, bounce) {
await this.cameraTo({
duration: isValidObject(duration)?duration:2000,
bounce: isValidObject(bounce)?bounce:false,
x: this.args.x - amount
})
}
,
cameraUp: async function(amount, duration, bounce) {
await this.cameraTo({
duration: isValidObject(duration)?duration:2000,
bounce: isValidObject(bounce)?bounce:false,
y: this.args.y + amount
})
}
,
cameraDown: async function(amount, duration, bounce) {
await this.cameraTo({
duration: isValidObject(duration)?duration:2000,
bounce: isValidObject(bounce)?bounce:false,
y: this.args.y - amount
})
}
,
cameraBack: async function(amount, duration, bounce) {
await this.cameraTo({
duration: isValidObject(duration)?duration:2000,
bounce: isValidObject(bounce)?bounce:false,
z: this.args.z + amount
})
}
,
cameraForward: async function(amount, duration, bounce) {
await this.cameraTo({
duration: isValidObject(duration)?duration:2000,
bounce: isValidObject(bounce)?bounce:false,
z: this.args.z - amount
})
}
,
cameraTo: async function(opts) {
var mm = this
var dd = document.querySelector("#camera_rig_3d" )
var loop = "0"
var direction = "normal"
var duration = 2000
var bounce = false
var newX = this.args.x
var newY = this.args.y
var newZ = this.args.z
if (isValidObject(opts.x)) {
newX = opts.x
}
if (isValidObject(opts.y)) {
newY = opts.y
}
if (isValidObject(opts.z)) {
newZ = opts.z
}
var newPosition = newX + " " + newY + " " + newZ
if (isValidObject(opts.loop)) {
loop = opts.loop
}
if (isValidObject(opts.bounce) && (opts.bounce == true)) {
bounce = true
direction = "alternate"
loop = "1"
}
if (isValidObject(opts.duration)) {
duration = opts.duration
}
//
// we need Math.random here as otherwise the animation will not get triggered for
// repeated invocations
//
dd.setAttribute("animation",
`property: position; to: ${newPosition}; loop: ${loop}; dur: ${duration}; dir: ${direction}; rand: ${Math.random()} `
);
if (!bounce) {
setTimeout(function() {
mm.args.x = newX
mm.args.y = newY
mm.args.z = newZ
},(duration + 100))
}
return {}
}
,
enterVR: async function() {
var scene = document.querySelector('#scene');
scene.enterVR()
}
,
exitVR: async function() {
var scene = document.querySelector('#scene');
scene.exitVR()
}
,
getVRMode: async function() {
return this.inVRMode
}
,
keyPressEventHandler: function(keyCode) {
this.args.lastKeyPressed = String.fromCharCode(keyCode)
this.$emit('send', {
type: "subcomponent_event",
control_name: this.args.name,
sub_type: "keypressed",
args: {
key_pressed: String.fromCharCode(keyCode),
key_code: keyCode
},
code: this.args.keypress_event
})
}
,
keyDownEventHandler: function(keyCode) {
this.args.lastKeyDown = String.fromCharCode(keyCode)
this.$emit('send', {
type: "subcomponent_event",
control_name: this.args.name,
sub_type: "keydown",
args: {
key_down: String.fromCharCode(keyCode),
key_code: keyCode
},
code: this.args.keydown_event
})
}
}
})
}
| Can trap backspace, delete, and enter in 3d container
| public/visifile_drivers/controls/container_3d.js | Can trap backspace, delete, and enter in 3d container | <ide><path>ublic/visifile_drivers/controls/container_3d.js
<ide> {
<ide> id: "keydown_event",
<ide> name: "Key Down Event",
<add> type: "Event"
<add> }
<add> ,
<add> {
<add> id: "backspace_event",
<add> name: "BackspaceEvent",
<add> type: "Event"
<add> }
<add> ,
<add> {
<add> id: "delete_event",
<add> name: "Delete Event",
<add> type: "Event"
<add> }
<add> ,
<add> {
<add> id: "enter_event",
<add> name: "Enter Event",
<ide> type: "Event"
<ide> }
<ide>
<ide> },
<ide> code: this.args.keydown_event
<ide> })
<add>
<add> if (keyCode == 8) {
<add> this.$emit('send', {
<add> type: "subcomponent_event",
<add> control_name: this.args.name,
<add> sub_type: "backspace",
<add> code: this.args.backspace_event
<add> })
<add> }
<add> if (keyCode == 46) {
<add> this.$emit('send', {
<add> type: "subcomponent_event",
<add> control_name: this.args.name,
<add> sub_type: "delete",
<add> code: this.args.delete_event
<add> })
<add> }
<add> if (keyCode == 13) {
<add> this.$emit('send', {
<add> type: "subcomponent_event",
<add> control_name: this.args.name,
<add> sub_type: "enter",
<add> code: this.args.enter_event
<add> })
<add> }
<add>
<add>
<add>
<add>
<add>
<add>
<ide> }
<ide> }
<ide> }) |
|
Java | apache-2.0 | e859d4004c0c9f1257aa39c88ed80630c8f0f032 | 0 | TheNephilim88/andlytics,willlunniss/andlytics,willlunniss/andlytics,AndyScherzinger/andlytics,d4rken/andlytics,TheNephilim88/andlytics,AndyScherzinger/andlytics,nelenkov/andlytics | package com.github.andlyticsproject.console.v2;
import android.annotation.TargetApi;
import android.os.Build;
import android.util.JsonReader;
import android.util.Log;
import com.github.andlyticsproject.console.DevConsoleException;
import com.github.andlyticsproject.model.AppDetails;
import com.github.andlyticsproject.model.AppInfo;
import com.github.andlyticsproject.model.AppStats;
import com.github.andlyticsproject.model.Comment;
import com.github.andlyticsproject.model.Revenue;
import com.github.andlyticsproject.model.RevenueSummary;
import com.github.andlyticsproject.util.FileUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Locale;
/**
* This class contains static methods used to parse JSON from {@link DevConsoleV2}
*
* See <a href="https://github.com/AndlyticsProject/andlytics/wiki/Developer-Console-v2">the wiki</a> for some more
* documentation
*
*/
public class JsonParser {
private static final String TAG = JsonParser.class.getSimpleName();
private static final boolean DEBUG = false;
private JsonParser() {
}
/**
* Parses the supplied JSON string and adds the extracted ratings to the supplied
* {@link AppStats} object
*
* @param json
* @param stats
* @throws JSONException
*/
static void parseRatings(String json, AppStats stats) throws JSONException {
// Extract just the array with the values
JSONObject values = new JSONObject(json).getJSONObject("result").getJSONArray("1")
.getJSONObject(0);
// Ratings are at index 2 - 6
stats.setRating(values.getInt("2"), values.getInt("3"), values.getInt("4"),
values.getInt("5"), values.getInt("6"));
}
/**
* Parses the supplied JSON string and adds the extracted statistics to the supplied
* {@link AppStats} object
* based on the supplied statsType
* Not used at the moment
*
* @param json
* @param stats
* @param statsType
* @throws JSONException
*/
static void parseStatistics(String json, AppStats stats, int statsType) throws JSONException {
// Extract the top level values array
JSONObject values = new JSONObject(json).getJSONObject("result").getJSONObject("1");
/*
* null
* Nested array [null, [null, Array containing historical data]]
* null
* null
* null
* Nested arrays containing summary and historical data broken down by dimension e.g.
* Android version
* null
* null
* App name
*/
// For now we just care about todays value, later we may delve into the historical and
// dimensioned data
JSONArray historicalData = values.getJSONObject("1").getJSONArray("1");
JSONObject latestData = historicalData.getJSONObject(historicalData.length() - 1);
/*
* null
* Date
* [null, value]
*/
int latestValue = latestData.getJSONObject("2").getInt("1");
switch (statsType) {
case DevConsoleV2Protocol.STATS_TYPE_TOTAL_USER_INSTALLS:
stats.setTotalDownloads(latestValue);
break;
case DevConsoleV2Protocol.STATS_TYPE_ACTIVE_DEVICE_INSTALLS:
stats.setActiveInstalls(latestValue);
break;
default:
break;
}
}
/**
* Parses the supplied JSON string and builds a list of apps from it
*
* @param json
* @param accountName
* @param skipIncomplete
* @return List of apps
* @throws JSONException
*/
static List<AppInfo> parseAppInfos(String json, String accountName, boolean skipIncomplete)
throws JSONException {
Date now = new Date();
List<AppInfo> apps = new ArrayList<AppInfo>();
// Extract the base array containing apps
JSONObject result = new JSONObject(json).getJSONObject("result");
if (DEBUG) {
pp("result", result);
}
JSONArray jsonApps = result.optJSONArray("1");
if (DEBUG) {
pp("jsonApps", jsonApps);
}
if (jsonApps == null) {
// no apps yet?
return apps;
}
int numberOfApps = jsonApps.length();
Log.d(TAG, String.format("Found %d apps in JSON", numberOfApps));
for (int i = 0; i < numberOfApps; i++) {
AppInfo app = new AppInfo();
app.setAccount(accountName);
app.setLastUpdate(now);
// Per app:
// 1 : { 1: package name,
// 2 : { 1: [{1 : lang, 2: name, 3: description, 4: ??, 5: what's new}], 2 : ?? },
// 3 : ??,
// 4 : update history,
// 5 : price,
// 6 : update date,
// 7 : state?
// }
// 2 : {}
// 3 : { 1: active dnd, 2: # ratings, 3: avg rating, 4: ???, 5: total dnd }
// arrays have changed to objects, with the index as the key
/*
* Per app:
* null
* [ APP_INFO_ARRAY
* * null
* * packageName
* * Nested array with details
* * null
* * Nested array with version details
* * Nested array with price details
* * Last update Date
* * Number [1=published, 5 = draft?]
* ]
* null
* [ APP_STATS_ARRAY
* * null,
* * Active installs
* * Total ratings
* * Average rating
* * Errors
* * Total installs
* ]
*/
JSONObject jsonApp = jsonApps.getJSONObject(i);
JSONObject jsonAppInfo = jsonApp.getJSONObject("1");
if (DEBUG) {
pp("jsonAppInfo", jsonAppInfo);
}
String packageName = jsonAppInfo.getString("1");
// Look for "tmp.7238057230750432756094760456.235728507238057230542"
if (packageName == null
|| (packageName.startsWith("tmp.") && Character.isDigit(packageName.charAt(4)))) {
Log.d(TAG, String.format("Skipping draft app %d, package name=%s", i, packageName));
continue;
// Draft app
}
// Check number code and last updated date
// Published: 1
// Unpublished: 2
// Draft: 5
// Draft w/ in-app items?: 6
// TODO figure out the rest and add don't just skip, filter, etc. Cf. #223
int publishState = jsonAppInfo.optInt("7");
Log.d(TAG, String.format("%s: publishState=%d", packageName, publishState));
if (publishState != 1) {
// Not a published app, skipping
Log.d(TAG, String.format(
"Skipping app %d with state != 1: package name=%s: state=%d", i,
packageName, publishState));
continue;
}
app.setPublishState(publishState);
app.setPackageName(packageName);
/*
* Per app details:
* 1: Country code
* 2: App Name
* 3: Description
* 4: Promo text
* 5: Last what's new
*/
// skip if we can't get all the data
// XXX should we just let this crash so we know there is a problem?
if (!jsonAppInfo.has("2")) {
if (skipIncomplete) {
Log.d(TAG, String.format(
"Skipping app %d because no app details found: package name=%s", i,
packageName));
} else {
Log.d(TAG, "Adding incomplete app: " + packageName);
apps.add(app);
}
continue;
}
if (!jsonAppInfo.has("4")) {
if (skipIncomplete) {
Log.d(TAG, String.format(
"Skipping app %d because no versions info found: package name=%s", i,
packageName));
} else {
Log.d(TAG, "Adding incomplete app: " + packageName);
apps.add(app);
}
continue;
}
JSONObject appDetails = jsonAppInfo.getJSONObject("2").getJSONArray("1")
.getJSONObject(0);
if (DEBUG) {
pp("appDetails", appDetails);
}
app.setName(appDetails.getString("2"));
String description = appDetails.getString("3");
String changelog = appDetails.optString("5");
Long lastPlayStoreUpdate = jsonAppInfo.optLong("6");
AppDetails details = new AppDetails(description, changelog, lastPlayStoreUpdate);
app.setDetails(details);
/*
* Per app version details:
* null
* null
* packageName
* versionNumber
* versionName
* null
* Array with app icon [null,null,null,icon]
*/
// XXX
JSONArray appVersions = jsonAppInfo.getJSONObject("4").getJSONObject("1")
.optJSONArray("1");
if (DEBUG) {
pp("appVersions", appVersions);
}
if (appVersions == null) {
if (skipIncomplete) {
Log.d(TAG, String.format(
"Skipping app %d because no versions info found: package name=%s", i,
packageName));
} else {
Log.d(TAG, "Adding incomplete app: " + packageName);
apps.add(app);
}
continue;
}
JSONObject lastAppVersionDetails = appVersions.getJSONObject(appVersions.length() - 1)
.getJSONObject("2");
if (DEBUG) {
pp("lastAppVersionDetails", lastAppVersionDetails);
}
app.setVersionName(lastAppVersionDetails.getString("4"));
app.setIconUrl(lastAppVersionDetails.getJSONObject("6").getString("3"));
// App stats
/*
* null,
* Active installs
* Total ratings
* Average rating
* Errors
* Total installs
*/
// XXX this index might not be correct for all apps?
// 3 : { 1: active dnd, 2: # ratings, 3: avg rating, 4: #errors?, 5: total dnd }
JSONObject jsonAppStats = jsonApp.optJSONObject("3");
if (DEBUG) {
pp("jsonAppStats", jsonAppStats);
}
if (jsonAppStats == null) {
if (skipIncomplete) {
Log.d(TAG, String.format(
"Skipping app %d because no stats found: package name=%s", i,
packageName));
} else {
Log.d(TAG, "Adding incomplete app: " + packageName);
apps.add(app);
}
continue;
}
AppStats stats = new AppStats();
stats.setDate(now);
if (jsonAppStats.length() < 4) {
// no statistics (yet?) or weird format
// TODO do we need differentiate?
stats.setActiveInstalls(0);
stats.setTotalDownloads(0);
stats.setNumberOfErrors(0);
} else {
stats.setActiveInstalls(jsonAppStats.getInt("1"));
stats.setTotalDownloads(jsonAppStats.getInt("5"));
stats.setNumberOfErrors(jsonAppStats.optInt("4"));
}
app.setLatestStats(stats);
apps.add(app);
}
return apps;
}
private static void pp(String name, JSONArray jsonArr) {
try {
String pp = jsonArr == null ? "null" : jsonArr.toString(2);
Log.d(TAG, String.format("%s: %s", name, pp));
FileUtils.writeToDebugDir(name + "-pp.json", pp);
} catch (JSONException e) {
Log.w(TAG, "Error printing JSON: " + e.getMessage(), e);
}
}
private static void pp(String name, JSONObject jsonObj) {
try {
String pp = jsonObj == null ? "null" : jsonObj.toString(2);
Log.d(TAG, String.format("%s: %s", name, pp));
FileUtils.writeToDebugDir(name + "-pp.json", pp);
} catch (JSONException e) {
Log.w(TAG, "Error printing JSON: " + e.getMessage(), e);
}
}
/**
* Parses the supplied JSON string and returns the number of comments.
*
* @param json
* @return
* @throws JSONException
*/
static int parseCommentsCount(String json) throws JSONException {
// Just extract the number of comments
/*
* null
* Array containing arrays of comments
* numberOfComments
*/
return new JSONObject(json).getJSONObject("result").getInt("2");
}
/**
* Parses the supplied JSON string and returns a list of comments.
*
* @param json
* @return
* @throws JSONException
*/
static List<Comment> parseComments(String json) throws JSONException {
List<Comment> comments = new ArrayList<Comment>();
/*
* null
* Array containing arrays of comments
* numberOfComments
*/
JSONArray jsonComments = new JSONObject(json).getJSONObject("result").getJSONArray("1");
int count = jsonComments.length();
for (int i = 0; i < count; i++) {
Comment comment = new Comment();
JSONObject jsonComment = jsonComments.getJSONObject(i);
// TODO These examples are out of date and need updating
/*
* null
* "gaia:17919762185957048423:1:vm:11887109942373535891", -- ID?
* "REVIEWERS_NAME",
* "1343652956570", -- DATE?
* RATING,
* null
* "COMMENT",
* null,
* "VERSION_NAME",
* [ null,
* "DEVICE_CODE_NAME",
* "DEVICE_MANFACTURER",
* "DEVICE_MODEL"
* ],
* "LOCALE",
* null,
* 0
*/
// Example with developer reply
/*
* [
* null,
* "gaia:12824185113034449316:1:vm:18363775304595766012",
* "Micka�l",
* "1350333837326",
* 1,
* "",
* "Nul\tNul!! N'arrive pas a scanner le moindre code barre!",
* 73,
* "3.2.5",
* [
* null,
* "X10i",
* "SEMC",
* "Xperia X10"
* ],
* "fr_FR",
* [
* null,
* "Prixing fonctionne pourtant bien sur Xperia X10. Essayez de prendre un minimum de recul, au moins 20 � 30cm, �vitez les ombres et les reflets. N'h�sitez pas � nous �crire sur [email protected] pour une assistance personnalis�e."
* ,
* null,
* "1350393460968"
* ],
* 1
* ]
*/
String uniqueId = jsonComment.getString("1");
comment.setUniqueId(uniqueId);
String user = jsonComment.optString("2");
if (user != null && !"".equals(user) && !"null".equals(user)) {
comment.setUser(user);
}
comment.setDate(parseDate(jsonComment.getLong("3")));
comment.setRating(jsonComment.getInt("4"));
String version = jsonComment.optString("7");
if (version != null && !"".equals(version) && !version.equals("null")) {
comment.setAppVersion(version);
}
String commentLang = jsonComment.optJSONObject("5").getString("1");
String commentText = jsonComment.optJSONObject("5").getString("3");
comment.setLanguage(commentLang);
comment.setOriginalText(commentText);
// overwritten if translation is available
comment.setText(commentText);
JSONObject translation = jsonComment.optJSONObject("11");
if (translation != null) {
String displayLanguage = Locale.getDefault().getLanguage();
String translationLang = translation.getString("1");
// Apparently, a translation body is not always provided
// Possibly happens if the translation fails or equals the original
if(translation.has("3")) {
String translationText = translation.getString("3");
if (translationLang.contains(displayLanguage)) {
comment.setText(translationText);
}
}
}
JSONObject jsonDevice = jsonComment.optJSONObject("8");
if (jsonDevice != null) {
String device = jsonDevice.optString("3");
JSONArray extraInfo = jsonDevice.optJSONArray("2");
if (extraInfo != null) {
device += " " + extraInfo.optString(0);
}
comment.setDevice(device.trim());
}
JSONObject jsonReply = jsonComment.optJSONObject("9");
if (jsonReply != null) {
Comment reply = new Comment(true);
reply.setText(jsonReply.getString("1"));
reply.setDate(parseDate(jsonReply.getLong("3")));
reply.setOriginalCommentDate(comment.getDate());
comment.setReply(reply);
}
comments.add(comment);
}
return comments;
}
static Comment parseCommentReplyResponse(String json) throws JSONException {
// {"result":{"1":{"1":"REPLY","3":"TIME_STAMP"},"2":true},"xsrf":"XSRF_TOKEN"}
// or
// {"error":{"data":{"1":ERROR_CODE},"code":ERROR_CODE}}
JSONObject jsonObj = new JSONObject(json);
if (jsonObj.has("error")) {
throw parseError(jsonObj, "replying to comments");
}
JSONObject replyObj = jsonObj.getJSONObject("result").getJSONObject("1");
Comment result = new Comment(true);
result.setText(replyObj.getString("1"));
result.setDate(parseDate(Long.parseLong(replyObj.getString("3"))));
return result;
}
private static DevConsoleException parseError(JSONObject jsonObj, String message)
throws JSONException {
JSONObject errorObj = jsonObj.getJSONObject("error");
String data = errorObj.getJSONObject("data").optString("1");
String errorCode = errorObj.optString("code");
return new DevConsoleException(String.format("Error %s: %s, errorCode=%s", message, data,
errorCode));
}
static RevenueSummary parseRevenueResponse(String json) throws JSONException {
JSONObject jsonObj = new JSONObject(json);
if (jsonObj.has("error")) {
throw parseError(jsonObj, "fetch revenue summary");
}
JSONObject resultObj = jsonObj.getJSONObject("result");
String currency = resultObj.optString("1");
// XXX does this really mean that the app has no revenue
if (currency == null || "".equals(currency)) {
return null;
}
// "6": "1376352000000"
long timestamp = resultObj.getLong("6");
// no time info, 00:00:00 GMT(?)
Date date = new Date(timestamp / 1000);
// 2 -total, 3 -sales, 4- in-app products
// we only use total (for now)
JSONObject revenueObj = resultObj.getJSONObject("2");
// even keys are for previous period
double lastDay = revenueObj.getDouble("1");
double last7Days = revenueObj.getDouble("3");
double last30Days = revenueObj.getDouble("5");
// NaN is treated like NULL -> DB error
double overall = revenueObj.optDouble("7", 0.0);
return RevenueSummary.createTotal(currency, date, lastDay, last7Days, last30Days, overall);
}
/**
* Parses the given date
*
* @param unixDateCode
* @return
*/
private static Date parseDate(long unixDateCode) {
return new Date(unixDateCode);
}
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
public static Revenue parseLatestTotalRevenue(String json) throws IOException {
JsonReader reader = new JsonReader(new StringReader(json));
reader.setLenient(true);
String currency = null;
Date reportDate = null;
Date revenueDate = null;
String revenueType1 = null;
String revenueType2 = null;
double value = 0;
reader.beginObject();
while (reader.hasNext()) {
String name = reader.nextName();
if ("result".equals(name)) {
reader.beginObject();
while (reader.hasNext()) {
name = reader.nextName();
// 1: sales, 2: in-app, 3: subscriptions?
// XXX this doesn't handle the case where there is more
// than one, e.g. app sales + subscriptions
if ("1".equals(name) || "2".equals(name) || "3".equals(name)) {
// revenue list: date->amount
// [{"1":"1304103600000","2":{"2":234.0}},..{"1":"1304449200000","2":{"2":123.0}},...]
reader.beginObject();
while (reader.hasNext()) {
name = reader.nextName();
if ("1".equals(name)) {
reader.beginArray();
while (reader.hasNext()) {
reader.beginObject();
double dailyRevenue = 0;
Date date = null;
while (reader.hasNext()) {
name = reader.nextName();
if ("1".equals(name)) {
date = new Date(reader.nextLong());
if (revenueDate == null) {
revenueDate = (Date) date.clone();
}
} else if ("2".equals(name)) {
reader.beginObject();
while (reader.hasNext()) {
name = reader.nextName();
if ("2".equals(name)) {
dailyRevenue = reader.nextDouble();
if (date != null
&& date.getTime() > revenueDate
.getTime()) {
revenueDate = date;
value = dailyRevenue;
}
}
}
reader.endObject();
}
}
reader.endObject();
}
reader.endArray();
} else if ("2".equals(name)) {
// "APP", "IN_APP",
revenueType1 = reader.nextString();
} else if ("3".equals(name)) {
revenueType2 = reader.nextString();
}
}
reader.endObject();
} else if ("4".equals(name)) {
reportDate = new Date(reader.nextLong());
} else if ("5".equals(name)) {
currency = reader.nextString();
}
}
reader.endObject();
} else if ("xsrf".equals(name)) {
// consume XSRF
reader.nextString();
}
}
reader.endObject();
// XXX what happens when there is more than one type?
Revenue.Type type = Revenue.Type.TOTAL;
if ("APP".equals(revenueType1)) {
type = Revenue.Type.APP_SALES;
} else if ("IN_APP".equals(revenueType1)) {
type = Revenue.Type.IN_APP;
} else {
type = Revenue.Type.SUBSCRIPTIONS;
}
// XXX do we need the date?
// return new Revenue(type, revenueDate, currency, value);
return new Revenue(type, value, currency);
}
}
| src/com/github/andlyticsproject/console/v2/JsonParser.java | package com.github.andlyticsproject.console.v2;
import android.annotation.TargetApi;
import android.os.Build;
import android.util.JsonReader;
import android.util.Log;
import com.github.andlyticsproject.console.DevConsoleException;
import com.github.andlyticsproject.model.AppDetails;
import com.github.andlyticsproject.model.AppInfo;
import com.github.andlyticsproject.model.AppStats;
import com.github.andlyticsproject.model.Comment;
import com.github.andlyticsproject.model.Revenue;
import com.github.andlyticsproject.model.RevenueSummary;
import com.github.andlyticsproject.util.FileUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Locale;
/**
* This class contains static methods used to parse JSON from {@link DevConsoleV2}
*
* See {@link https://github.com/AndlyticsProject/andlytics/wiki/Developer-Console-v2} for some more
* documentation
*
*/
public class JsonParser {
private static final String TAG = JsonParser.class.getSimpleName();
private static final boolean DEBUG = false;
private JsonParser() {
}
/**
* Parses the supplied JSON string and adds the extracted ratings to the supplied
* {@link AppStats} object
*
* @param json
* @param stats
* @throws JSONException
*/
static void parseRatings(String json, AppStats stats) throws JSONException {
// Extract just the array with the values
JSONObject values = new JSONObject(json).getJSONObject("result").getJSONArray("1")
.getJSONObject(0);
// Ratings are at index 2 - 6
stats.setRating(values.getInt("2"), values.getInt("3"), values.getInt("4"),
values.getInt("5"), values.getInt("6"));
}
/**
* Parses the supplied JSON string and adds the extracted statistics to the supplied
* {@link AppStats} object
* based on the supplied statsType
* Not used at the moment
*
* @param json
* @param stats
* @param statsType
* @throws JSONException
*/
static void parseStatistics(String json, AppStats stats, int statsType) throws JSONException {
// Extract the top level values array
JSONObject values = new JSONObject(json).getJSONObject("result").getJSONObject("1");
/*
* null
* Nested array [null, [null, Array containing historical data]]
* null
* null
* null
* Nested arrays containing summary and historical data broken down by dimension e.g.
* Android version
* null
* null
* App name
*/
// For now we just care about todays value, later we may delve into the historical and
// dimensioned data
JSONArray historicalData = values.getJSONObject("1").getJSONArray("1");
JSONObject latestData = historicalData.getJSONObject(historicalData.length() - 1);
/*
* null
* Date
* [null, value]
*/
int latestValue = latestData.getJSONObject("2").getInt("1");
switch (statsType) {
case DevConsoleV2Protocol.STATS_TYPE_TOTAL_USER_INSTALLS:
stats.setTotalDownloads(latestValue);
break;
case DevConsoleV2Protocol.STATS_TYPE_ACTIVE_DEVICE_INSTALLS:
stats.setActiveInstalls(latestValue);
break;
default:
break;
}
}
/**
* Parses the supplied JSON string and builds a list of apps from it
*
* @param json
* @param accountName
* @param skipIncomplete
* @return List of apps
* @throws JSONException
*/
static List<AppInfo> parseAppInfos(String json, String accountName, boolean skipIncomplete)
throws JSONException {
Date now = new Date();
List<AppInfo> apps = new ArrayList<AppInfo>();
// Extract the base array containing apps
JSONObject result = new JSONObject(json).getJSONObject("result");
if (DEBUG) {
pp("result", result);
}
JSONArray jsonApps = result.optJSONArray("1");
if (DEBUG) {
pp("jsonApps", jsonApps);
}
if (jsonApps == null) {
// no apps yet?
return apps;
}
int numberOfApps = jsonApps.length();
Log.d(TAG, String.format("Found %d apps in JSON", numberOfApps));
for (int i = 0; i < numberOfApps; i++) {
AppInfo app = new AppInfo();
app.setAccount(accountName);
app.setLastUpdate(now);
// Per app:
// 1 : { 1: package name,
// 2 : { 1: [{1 : lang, 2: name, 3: description, 4: ??, 5: what's new}], 2 : ?? },
// 3 : ??,
// 4 : update history,
// 5 : price,
// 6 : update date,
// 7 : state?
// }
// 2 : {}
// 3 : { 1: active dnd, 2: # ratings, 3: avg rating, 4: ???, 5: total dnd }
// arrays have changed to objects, with the index as the key
/*
* Per app:
* null
* [ APP_INFO_ARRAY
* * null
* * packageName
* * Nested array with details
* * null
* * Nested array with version details
* * Nested array with price details
* * Last update Date
* * Number [1=published, 5 = draft?]
* ]
* null
* [ APP_STATS_ARRAY
* * null,
* * Active installs
* * Total ratings
* * Average rating
* * Errors
* * Total installs
* ]
*/
JSONObject jsonApp = jsonApps.getJSONObject(i);
JSONObject jsonAppInfo = jsonApp.getJSONObject("1");
if (DEBUG) {
pp("jsonAppInfo", jsonAppInfo);
}
String packageName = jsonAppInfo.getString("1");
// Look for "tmp.7238057230750432756094760456.235728507238057230542"
if (packageName == null
|| (packageName.startsWith("tmp.") && Character.isDigit(packageName.charAt(4)))) {
Log.d(TAG, String.format("Skipping draft app %d, package name=%s", i, packageName));
continue;
// Draft app
}
// Check number code and last updated date
// Published: 1
// Unpublished: 2
// Draft: 5
// Draft w/ in-app items?: 6
// TODO figure out the rest and add don't just skip, filter, etc. Cf. #223
int publishState = jsonAppInfo.optInt("7");
Log.d(TAG, String.format("%s: publishState=%d", packageName, publishState));
if (publishState != 1) {
// Not a published app, skipping
Log.d(TAG, String.format(
"Skipping app %d with state != 1: package name=%s: state=%d", i,
packageName, publishState));
continue;
}
app.setPublishState(publishState);
app.setPackageName(packageName);
/*
* Per app details:
* 1: Country code
* 2: App Name
* 3: Description
* 4: Promo text
* 5: Last what's new
*/
// skip if we can't get all the data
// XXX should we just let this crash so we know there is a problem?
if (!jsonAppInfo.has("2")) {
if (skipIncomplete) {
Log.d(TAG, String.format(
"Skipping app %d because no app details found: package name=%s", i,
packageName));
} else {
Log.d(TAG, "Adding incomplete app: " + packageName);
apps.add(app);
}
continue;
}
if (!jsonAppInfo.has("4")) {
if (skipIncomplete) {
Log.d(TAG, String.format(
"Skipping app %d because no versions info found: package name=%s", i,
packageName));
} else {
Log.d(TAG, "Adding incomplete app: " + packageName);
apps.add(app);
}
continue;
}
JSONObject appDetails = jsonAppInfo.getJSONObject("2").getJSONArray("1")
.getJSONObject(0);
if (DEBUG) {
pp("appDetails", appDetails);
}
app.setName(appDetails.getString("2"));
String description = appDetails.getString("3");
String changelog = appDetails.optString("5");
Long lastPlayStoreUpdate = jsonAppInfo.optLong("6");
AppDetails details = new AppDetails(description, changelog, lastPlayStoreUpdate);
app.setDetails(details);
/*
* Per app version details:
* null
* null
* packageName
* versionNumber
* versionName
* null
* Array with app icon [null,null,null,icon]
*/
// XXX
JSONArray appVersions = jsonAppInfo.getJSONObject("4").getJSONObject("1")
.optJSONArray("1");
if (DEBUG) {
pp("appVersions", appVersions);
}
if (appVersions == null) {
if (skipIncomplete) {
Log.d(TAG, String.format(
"Skipping app %d because no versions info found: package name=%s", i,
packageName));
} else {
Log.d(TAG, "Adding incomplete app: " + packageName);
apps.add(app);
}
continue;
}
JSONObject lastAppVersionDetails = appVersions.getJSONObject(appVersions.length() - 1)
.getJSONObject("2");
if (DEBUG) {
pp("lastAppVersionDetails", lastAppVersionDetails);
}
app.setVersionName(lastAppVersionDetails.getString("4"));
app.setIconUrl(lastAppVersionDetails.getJSONObject("6").getString("3"));
// App stats
/*
* null,
* Active installs
* Total ratings
* Average rating
* Errors
* Total installs
*/
// XXX this index might not be correct for all apps?
// 3 : { 1: active dnd, 2: # ratings, 3: avg rating, 4: #errors?, 5: total dnd }
JSONObject jsonAppStats = jsonApp.optJSONObject("3");
if (DEBUG) {
pp("jsonAppStats", jsonAppStats);
}
if (jsonAppStats == null) {
if (skipIncomplete) {
Log.d(TAG, String.format(
"Skipping app %d because no stats found: package name=%s", i,
packageName));
} else {
Log.d(TAG, "Adding incomplete app: " + packageName);
apps.add(app);
}
continue;
}
AppStats stats = new AppStats();
stats.setDate(now);
if (jsonAppStats.length() < 4) {
// no statistics (yet?) or weird format
// TODO do we need differentiate?
stats.setActiveInstalls(0);
stats.setTotalDownloads(0);
stats.setNumberOfErrors(0);
} else {
stats.setActiveInstalls(jsonAppStats.getInt("1"));
stats.setTotalDownloads(jsonAppStats.getInt("5"));
stats.setNumberOfErrors(jsonAppStats.optInt("4"));
}
app.setLatestStats(stats);
apps.add(app);
}
return apps;
}
private static void pp(String name, JSONArray jsonArr) {
try {
String pp = jsonArr == null ? "null" : jsonArr.toString(2);
Log.d(TAG, String.format("%s: %s", name, pp));
FileUtils.writeToDebugDir(name + "-pp.json", pp);
} catch (JSONException e) {
Log.w(TAG, "Error printing JSON: " + e.getMessage(), e);
}
}
private static void pp(String name, JSONObject jsonObj) {
try {
String pp = jsonObj == null ? "null" : jsonObj.toString(2);
Log.d(TAG, String.format("%s: %s", name, pp));
FileUtils.writeToDebugDir(name + "-pp.json", pp);
} catch (JSONException e) {
Log.w(TAG, "Error printing JSON: " + e.getMessage(), e);
}
}
/**
* Parses the supplied JSON string and returns the number of comments.
*
* @param json
* @return
* @throws JSONException
*/
static int parseCommentsCount(String json) throws JSONException {
// Just extract the number of comments
/*
* null
* Array containing arrays of comments
* numberOfComments
*/
return new JSONObject(json).getJSONObject("result").getInt("2");
}
/**
* Parses the supplied JSON string and returns a list of comments.
*
* @param json
* @return
* @throws JSONException
*/
static List<Comment> parseComments(String json) throws JSONException {
List<Comment> comments = new ArrayList<Comment>();
/*
* null
* Array containing arrays of comments
* numberOfComments
*/
JSONArray jsonComments = new JSONObject(json).getJSONObject("result").getJSONArray("1");
int count = jsonComments.length();
for (int i = 0; i < count; i++) {
Comment comment = new Comment();
JSONObject jsonComment = jsonComments.getJSONObject(i);
// TODO These examples are out of date and need updating
/*
* null
* "gaia:17919762185957048423:1:vm:11887109942373535891", -- ID?
* "REVIEWERS_NAME",
* "1343652956570", -- DATE?
* RATING,
* null
* "COMMENT",
* null,
* "VERSION_NAME",
* [ null,
* "DEVICE_CODE_NAME",
* "DEVICE_MANFACTURER",
* "DEVICE_MODEL"
* ],
* "LOCALE",
* null,
* 0
*/
// Example with developer reply
/*
* [
* null,
* "gaia:12824185113034449316:1:vm:18363775304595766012",
* "Micka�l",
* "1350333837326",
* 1,
* "",
* "Nul\tNul!! N'arrive pas a scanner le moindre code barre!",
* 73,
* "3.2.5",
* [
* null,
* "X10i",
* "SEMC",
* "Xperia X10"
* ],
* "fr_FR",
* [
* null,
* "Prixing fonctionne pourtant bien sur Xperia X10. Essayez de prendre un minimum de recul, au moins 20 � 30cm, �vitez les ombres et les reflets. N'h�sitez pas � nous �crire sur [email protected] pour une assistance personnalis�e."
* ,
* null,
* "1350393460968"
* ],
* 1
* ]
*/
String uniqueId = jsonComment.getString("1");
comment.setUniqueId(uniqueId);
String user = jsonComment.optString("2");
if (user != null && !"".equals(user) && !"null".equals(user)) {
comment.setUser(user);
}
comment.setDate(parseDate(jsonComment.getLong("3")));
comment.setRating(jsonComment.getInt("4"));
String version = jsonComment.optString("7");
if (version != null && !"".equals(version) && !version.equals("null")) {
comment.setAppVersion(version);
}
String commentLang = jsonComment.optJSONObject("5").getString("1");
String commentText = jsonComment.optJSONObject("5").getString("3");
comment.setLanguage(commentLang);
comment.setOriginalText(commentText);
// overwritten if translation is available
comment.setText(commentText);
JSONObject translation = jsonComment.optJSONObject("11");
if (translation != null) {
String displayLanguage = Locale.getDefault().getLanguage();
String translationLang = translation.getString("1");
String translationText = translation.getString("3");
if (translationLang.contains(displayLanguage)) {
comment.setText(translationText);
}
}
JSONObject jsonDevice = jsonComment.optJSONObject("8");
if (jsonDevice != null) {
String device = jsonDevice.optString("3");
JSONArray extraInfo = jsonDevice.optJSONArray("2");
if (extraInfo != null) {
device += " " + extraInfo.optString(0);
}
comment.setDevice(device.trim());
}
JSONObject jsonReply = jsonComment.optJSONObject("9");
if (jsonReply != null) {
Comment reply = new Comment(true);
reply.setText(jsonReply.getString("1"));
reply.setDate(parseDate(jsonReply.getLong("3")));
reply.setOriginalCommentDate(comment.getDate());
comment.setReply(reply);
}
comments.add(comment);
}
return comments;
}
static Comment parseCommentReplyResponse(String json) throws JSONException {
// {"result":{"1":{"1":"REPLY","3":"TIME_STAMP"},"2":true},"xsrf":"XSRF_TOKEN"}
// or
// {"error":{"data":{"1":ERROR_CODE},"code":ERROR_CODE}}
JSONObject jsonObj = new JSONObject(json);
if (jsonObj.has("error")) {
throw parseError(jsonObj, "replying to comments");
}
JSONObject replyObj = jsonObj.getJSONObject("result").getJSONObject("1");
Comment result = new Comment(true);
result.setText(replyObj.getString("1"));
result.setDate(parseDate(Long.parseLong(replyObj.getString("3"))));
return result;
}
private static DevConsoleException parseError(JSONObject jsonObj, String message)
throws JSONException {
JSONObject errorObj = jsonObj.getJSONObject("error");
String data = errorObj.getJSONObject("data").optString("1");
String errorCode = errorObj.optString("code");
return new DevConsoleException(String.format("Error %s: %s, errorCode=%s", message, data,
errorCode));
}
static RevenueSummary parseRevenueResponse(String json) throws JSONException {
JSONObject jsonObj = new JSONObject(json);
if (jsonObj.has("error")) {
throw parseError(jsonObj, "fetch revenue summary");
}
JSONObject resultObj = jsonObj.getJSONObject("result");
String currency = resultObj.optString("1");
// XXX does this really mean that the app has no revenue
if (currency == null || "".equals(currency)) {
return null;
}
// "6": "1376352000000"
long timestamp = resultObj.getLong("6");
// no time info, 00:00:00 GMT(?)
Date date = new Date(timestamp / 1000);
// 2 -total, 3 -sales, 4- in-app products
// we only use total (for now)
JSONObject revenueObj = resultObj.getJSONObject("2");
// even keys are for previous period
double lastDay = revenueObj.getDouble("1");
double last7Days = revenueObj.getDouble("3");
double last30Days = revenueObj.getDouble("5");
// NaN is treated like NULL -> DB error
double overall = revenueObj.optDouble("7", 0.0);
return RevenueSummary.createTotal(currency, date, lastDay, last7Days, last30Days, overall);
}
/**
* Parses the given date
*
* @param unixDateCode
* @return
*/
private static Date parseDate(long unixDateCode) {
return new Date(unixDateCode);
}
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
public static Revenue parseLatestTotalRevenue(String json) throws IOException {
JsonReader reader = new JsonReader(new StringReader(json));
reader.setLenient(true);
String currency = null;
Date reportDate = null;
Date revenueDate = null;
String revenueType1 = null;
String revenueType2 = null;
double value = 0;
reader.beginObject();
while (reader.hasNext()) {
String name = reader.nextName();
if ("result".equals(name)) {
reader.beginObject();
while (reader.hasNext()) {
name = reader.nextName();
// 1: sales, 2: in-app, 3: subscriptions?
// XXX this doesn't handle the case where there is more
// than one, e.g. app sales + subscriptions
if ("1".equals(name) || "2".equals(name) || "3".equals(name)) {
// revenue list: date->amount
// [{"1":"1304103600000","2":{"2":234.0}},..{"1":"1304449200000","2":{"2":123.0}},...]
reader.beginObject();
while (reader.hasNext()) {
name = reader.nextName();
if ("1".equals(name)) {
reader.beginArray();
while (reader.hasNext()) {
reader.beginObject();
double dailyRevenue = 0;
Date date = null;
while (reader.hasNext()) {
name = reader.nextName();
if ("1".equals(name)) {
date = new Date(reader.nextLong());
if (revenueDate == null) {
revenueDate = (Date) date.clone();
}
} else if ("2".equals(name)) {
reader.beginObject();
while (reader.hasNext()) {
name = reader.nextName();
if ("2".equals(name)) {
dailyRevenue = reader.nextDouble();
if (date != null
&& date.getTime() > revenueDate
.getTime()) {
revenueDate = date;
value = dailyRevenue;
}
}
}
reader.endObject();
}
}
reader.endObject();
}
reader.endArray();
} else if ("2".equals(name)) {
// "APP", "IN_APP",
revenueType1 = reader.nextString();
} else if ("3".equals(name)) {
revenueType2 = reader.nextString();
}
}
reader.endObject();
} else if ("4".equals(name)) {
reportDate = new Date(reader.nextLong());
} else if ("5".equals(name)) {
currency = reader.nextString();
}
}
reader.endObject();
} else if ("xsrf".equals(name)) {
// consume XSRF
reader.nextString();
}
}
reader.endObject();
// XXX what happens when there is more than one type?
Revenue.Type type = Revenue.Type.TOTAL;
if ("APP".equals(revenueType1)) {
type = Revenue.Type.APP_SALES;
} else if ("IN_APP".equals(revenueType1)) {
type = Revenue.Type.IN_APP;
} else {
type = Revenue.Type.SUBSCRIPTIONS;
}
// XXX do we need the date?
// return new Revenue(type, revenueDate, currency, value);
return new Revenue(type, value, currency);
}
}
| Fixed comment parsing, translation json does not always contain a body
field ("3"). | src/com/github/andlyticsproject/console/v2/JsonParser.java | Fixed comment parsing, translation json does not always contain a body field ("3"). | <ide><path>rc/com/github/andlyticsproject/console/v2/JsonParser.java
<ide> if (translation != null) {
<ide> String displayLanguage = Locale.getDefault().getLanguage();
<ide> String translationLang = translation.getString("1");
<del> String translationText = translation.getString("3");
<del> if (translationLang.contains(displayLanguage)) {
<del> comment.setText(translationText);
<add>
<add> // Apparently, a translation body is not always provided
<add> // Possibly happens if the translation fails or equals the original
<add> if(translation.has("3")) {
<add> String translationText = translation.getString("3");
<add> if (translationLang.contains(displayLanguage)) {
<add> comment.setText(translationText);
<add> }
<ide> }
<ide> }
<ide> |
|
Java | apache-2.0 | 32b33bea0fe1e823d5f5202be02c4894fc194d97 | 0 | mufaddalq/cloudstack-datera-driver,mufaddalq/cloudstack-datera-driver,mufaddalq/cloudstack-datera-driver,mufaddalq/cloudstack-datera-driver,mufaddalq/cloudstack-datera-driver,mufaddalq/cloudstack-datera-driver | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.cloud.storage;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import javax.inject.Inject;
import javax.naming.ConfigurationException;
import com.cloud.utils.DateUtil;
import com.cloud.utils.EnumUtils;
import com.cloud.utils.NumbersUtil;
import com.cloud.utils.Pair;
import com.cloud.utils.UriUtils;
import org.apache.log4j.Logger;
import org.springframework.stereotype.Component;
import org.apache.cloudstack.api.BaseCmd;
import org.apache.cloudstack.api.command.user.volume.AttachVolumeCmd;
import org.apache.cloudstack.api.command.user.volume.CreateVolumeCmd;
import org.apache.cloudstack.api.command.user.volume.DetachVolumeCmd;
import org.apache.cloudstack.api.command.user.volume.ExtractVolumeCmd;
import org.apache.cloudstack.api.command.user.volume.MigrateVolumeCmd;
import org.apache.cloudstack.api.command.user.volume.ResizeVolumeCmd;
import org.apache.cloudstack.api.command.user.volume.UpdateVolumeCmd;
import org.apache.cloudstack.api.command.user.volume.UploadVolumeCmd;
import org.apache.cloudstack.engine.subsystem.api.storage.ChapInfo;
import org.apache.cloudstack.engine.subsystem.api.storage.DataStore;
import org.apache.cloudstack.engine.subsystem.api.storage.DataStoreManager;
import org.apache.cloudstack.engine.subsystem.api.storage.DataStoreProviderManager;
import org.apache.cloudstack.engine.subsystem.api.storage.HostScope;
import org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreInfo;
import org.apache.cloudstack.engine.subsystem.api.storage.Scope;
import org.apache.cloudstack.engine.subsystem.api.storage.SnapshotDataFactory;
import org.apache.cloudstack.engine.subsystem.api.storage.SnapshotInfo;
import org.apache.cloudstack.engine.subsystem.api.storage.StoragePoolAllocator;
import org.apache.cloudstack.engine.subsystem.api.storage.TemplateDataFactory;
import org.apache.cloudstack.engine.subsystem.api.storage.TemplateInfo;
import org.apache.cloudstack.engine.subsystem.api.storage.VolumeDataFactory;
import org.apache.cloudstack.engine.subsystem.api.storage.VolumeInfo;
import org.apache.cloudstack.engine.subsystem.api.storage.VolumeService;
import org.apache.cloudstack.engine.subsystem.api.storage.VolumeService.VolumeApiResult;
import org.apache.cloudstack.framework.async.AsyncCallFuture;
import org.apache.cloudstack.storage.command.AttachAnswer;
import org.apache.cloudstack.storage.command.AttachCommand;
import org.apache.cloudstack.storage.command.CommandResult;
import org.apache.cloudstack.storage.command.DettachCommand;
import org.apache.cloudstack.storage.datastore.db.PrimaryDataStoreDao;
import org.apache.cloudstack.storage.datastore.db.StoragePoolDetailsDao;
import org.apache.cloudstack.storage.datastore.db.StoragePoolVO;
import org.apache.cloudstack.storage.datastore.db.TemplateDataStoreDao;
import org.apache.cloudstack.storage.datastore.db.TemplateDataStoreVO;
import org.apache.cloudstack.storage.datastore.db.VolumeDataStoreDao;
import org.apache.cloudstack.storage.datastore.db.VolumeDataStoreVO;
import org.apache.cloudstack.storage.image.datastore.ImageStoreEntity;
import com.cloud.agent.AgentManager;
import com.cloud.agent.api.Answer;
import com.cloud.agent.api.to.DataTO;
import com.cloud.agent.api.to.DiskTO;
import com.cloud.agent.api.to.VirtualMachineTO;
import com.cloud.alert.AlertManager;
import com.cloud.api.ApiDBUtils;
import com.cloud.async.AsyncJobExecutor;
import com.cloud.async.AsyncJobManager;
import com.cloud.async.AsyncJobVO;
import com.cloud.async.BaseAsyncJobExecutor;
import com.cloud.capacity.CapacityManager;
import com.cloud.capacity.dao.CapacityDao;
import com.cloud.configuration.Config;
import com.cloud.configuration.ConfigurationManager;
import com.cloud.configuration.Resource.ResourceType;
import com.cloud.configuration.dao.ConfigurationDao;
import com.cloud.consoleproxy.ConsoleProxyManager;
import com.cloud.dc.ClusterVO;
import com.cloud.dc.DataCenter;
import com.cloud.dc.DataCenterVO;
import com.cloud.dc.HostPodVO;
import com.cloud.dc.dao.ClusterDao;
import com.cloud.dc.dao.DataCenterDao;
import com.cloud.dc.dao.HostPodDao;
import com.cloud.deploy.DeployDestination;
import com.cloud.domain.Domain;
import com.cloud.domain.dao.DomainDao;
import com.cloud.event.ActionEvent;
import com.cloud.event.EventTypes;
import com.cloud.event.UsageEventUtils;
import com.cloud.event.dao.EventDao;
import com.cloud.exception.ConcurrentOperationException;
import com.cloud.exception.InsufficientStorageCapacityException;
import com.cloud.exception.InvalidParameterValueException;
import com.cloud.exception.PermissionDeniedException;
import com.cloud.exception.ResourceAllocationException;
import com.cloud.exception.StorageUnavailableException;
import com.cloud.host.Host;
import com.cloud.host.HostVO;
import com.cloud.host.dao.HostDao;
import com.cloud.hypervisor.Hypervisor.HypervisorType;
import com.cloud.hypervisor.HypervisorCapabilitiesVO;
import com.cloud.hypervisor.HypervisorGuruManager;
import com.cloud.hypervisor.dao.HypervisorCapabilitiesDao;
import com.cloud.network.NetworkModel;
import com.cloud.org.Grouping;
import com.cloud.resource.ResourceManager;
import com.cloud.server.ManagementServer;
import com.cloud.service.ServiceOfferingVO;
import com.cloud.service.dao.ServiceOfferingDao;
import com.cloud.storage.Storage.ImageFormat;
import com.cloud.storage.Volume.Type;
import com.cloud.storage.dao.DiskOfferingDao;
import com.cloud.storage.dao.SnapshotDao;
import com.cloud.storage.dao.SnapshotPolicyDao;
import com.cloud.storage.dao.StoragePoolHostDao;
import com.cloud.storage.dao.StoragePoolWorkDao;
import com.cloud.storage.dao.UploadDao;
import com.cloud.storage.dao.VMTemplateDao;
import com.cloud.storage.dao.VMTemplatePoolDao;
import com.cloud.storage.dao.VolumeDao;
import com.cloud.storage.dao.VolumeDetailsDao;
import com.cloud.storage.download.DownloadMonitor;
import com.cloud.storage.secondary.SecondaryStorageVmManager;
import com.cloud.storage.snapshot.SnapshotApiService;
import com.cloud.storage.snapshot.SnapshotManager;
import com.cloud.storage.snapshot.SnapshotScheduler;
import com.cloud.storage.upload.UploadMonitor;
import com.cloud.tags.dao.ResourceTagDao;
import com.cloud.template.TemplateManager;
import com.cloud.user.Account;
import com.cloud.user.AccountManager;
import com.cloud.user.ResourceLimitService;
import com.cloud.user.UserContext;
import com.cloud.user.VmDiskStatisticsVO;
import com.cloud.user.dao.AccountDao;
import com.cloud.user.dao.UserDao;
import com.cloud.user.dao.VmDiskStatisticsDao;
import com.cloud.uservm.UserVm;
import com.cloud.utils.component.ManagerBase;
import com.cloud.utils.db.DB;
import com.cloud.utils.db.Transaction;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.utils.fsm.NoTransitionException;
import com.cloud.utils.fsm.StateMachine2;
import com.cloud.vm.DiskProfile;
import com.cloud.vm.UserVmManager;
import com.cloud.vm.UserVmVO;
import com.cloud.vm.VMInstanceVO;
import com.cloud.vm.VirtualMachine;
import com.cloud.vm.VirtualMachine.State;
import com.cloud.vm.VirtualMachineManager;
import com.cloud.vm.VirtualMachineProfile;
import com.cloud.vm.dao.ConsoleProxyDao;
import com.cloud.vm.dao.DomainRouterDao;
import com.cloud.vm.dao.SecondaryStorageVmDao;
import com.cloud.vm.dao.UserVmDao;
import com.cloud.vm.dao.VMInstanceDao;
import com.cloud.vm.snapshot.VMSnapshotVO;
import com.cloud.vm.snapshot.dao.VMSnapshotDao;
@Component
public class VolumeManagerImpl extends ManagerBase implements VolumeManager {
private static final Logger s_logger = Logger
.getLogger(VolumeManagerImpl.class);
@Inject
protected UserVmManager _userVmMgr;
@Inject
protected AgentManager _agentMgr;
@Inject
protected TemplateManager _tmpltMgr;
@Inject
protected AsyncJobManager _asyncMgr;
@Inject
protected SnapshotManager _snapshotMgr;
@Inject
protected SnapshotScheduler _snapshotScheduler;
@Inject
protected AccountManager _accountMgr;
@Inject
protected ConfigurationManager _configMgr;
@Inject
protected ConsoleProxyManager _consoleProxyMgr;
@Inject
protected SecondaryStorageVmManager _secStorageMgr;
@Inject
protected NetworkModel _networkMgr;
@Inject
protected ServiceOfferingDao _serviceOfferingDao;
@Inject
protected VolumeDao _volsDao;
@Inject
protected HostDao _hostDao;
@Inject
protected ConsoleProxyDao _consoleProxyDao;
@Inject
protected SnapshotDao _snapshotDao;
@Inject
protected SnapshotManager _snapMgr;
@Inject
protected SnapshotPolicyDao _snapshotPolicyDao;
@Inject
protected StoragePoolHostDao _storagePoolHostDao;
@Inject
StoragePoolDetailsDao storagePoolDetailsDao;
@Inject
protected AlertManager _alertMgr;
@Inject
protected TemplateDataStoreDao _vmTemplateStoreDao = null;
@Inject
protected VMTemplatePoolDao _vmTemplatePoolDao = null;
@Inject
protected VMTemplateDao _vmTemplateDao = null;
@Inject
protected StoragePoolHostDao _poolHostDao = null;
@Inject
protected UserVmDao _userVmDao;
@Inject
VolumeDataStoreDao _volumeStoreDao;
@Inject
protected VMInstanceDao _vmInstanceDao;
@Inject
protected PrimaryDataStoreDao _storagePoolDao = null;
@Inject
protected CapacityDao _capacityDao;
@Inject
protected CapacityManager _capacityMgr;
@Inject
protected DiskOfferingDao _diskOfferingDao;
@Inject
protected AccountDao _accountDao;
@Inject
protected EventDao _eventDao = null;
@Inject
protected DataCenterDao _dcDao = null;
@Inject
protected HostPodDao _podDao = null;
@Inject
protected VMTemplateDao _templateDao;
@Inject
protected ServiceOfferingDao _offeringDao;
@Inject
protected DomainDao _domainDao;
@Inject
protected UserDao _userDao;
@Inject
protected ClusterDao _clusterDao;
@Inject
protected VirtualMachineManager _vmMgr;
@Inject
protected DomainRouterDao _domrDao;
@Inject
protected SecondaryStorageVmDao _secStrgDao;
@Inject
protected StoragePoolWorkDao _storagePoolWorkDao;
@Inject
protected HypervisorGuruManager _hvGuruMgr;
@Inject
protected VolumeDao _volumeDao;
@Inject
protected OCFS2Manager _ocfs2Mgr;
@Inject
protected ResourceLimitService _resourceLimitMgr;
@Inject
protected SecondaryStorageVmManager _ssvmMgr;
@Inject
protected ResourceManager _resourceMgr;
@Inject
protected DownloadMonitor _downloadMonitor;
@Inject
protected ResourceTagDao _resourceTagDao;
@Inject
protected VmDiskStatisticsDao _vmDiskStatsDao;
@Inject
protected VMSnapshotDao _vmSnapshotDao;
@Inject
protected List<StoragePoolAllocator> _storagePoolAllocators;
@Inject
ConfigurationDao _configDao;
@Inject
VolumeDetailsDao _volDetailDao;
@Inject
ManagementServer _msServer;
@Inject
DataStoreManager dataStoreMgr;
@Inject
DataStoreProviderManager dataStoreProviderMgr;
@Inject
VolumeService volService;
@Inject
VolumeDataFactory volFactory;
@Inject
TemplateDataFactory tmplFactory;
@Inject
SnapshotDataFactory snapshotFactory;
@Inject
SnapshotApiService snapshotMgr;
@Inject
UploadMonitor _uploadMonitor;
@Inject
UploadDao _uploadDao;
private int _copyvolumewait;
@Inject
protected HypervisorCapabilitiesDao _hypervisorCapabilitiesDao;
private final StateMachine2<Volume.State, Volume.Event, Volume> _volStateMachine;
@Inject
StorageManager storageMgr;
private int _customDiskOfferingMinSize = 1;
private final int _customDiskOfferingMaxSize = 1024;
private long _maxVolumeSizeInGb;
private boolean _recreateSystemVmEnabled;
public VolumeManagerImpl() {
_volStateMachine = Volume.State.getStateMachine();
}
@Override
public VolumeInfo moveVolume(VolumeInfo volume, long destPoolDcId,
Long destPoolPodId, Long destPoolClusterId,
HypervisorType dataDiskHyperType)
throws ConcurrentOperationException {
// Find a destination storage pool with the specified criteria
DiskOfferingVO diskOffering = _diskOfferingDao.findById(volume
.getDiskOfferingId());
DiskProfile dskCh = new DiskProfile(volume.getId(),
volume.getVolumeType(), volume.getName(), diskOffering.getId(),
diskOffering.getDiskSize(), diskOffering.getTagsArray(),
diskOffering.getUseLocalStorage(),
diskOffering.isRecreatable(), null);
dskCh.setHyperType(dataDiskHyperType);
DataCenterVO destPoolDataCenter = _dcDao.findById(destPoolDcId);
HostPodVO destPoolPod = _podDao.findById(destPoolPodId);
StoragePool destPool = storageMgr.findStoragePool(dskCh,
destPoolDataCenter, destPoolPod, destPoolClusterId, null, null,
new HashSet<StoragePool>());
if (destPool == null) {
throw new CloudRuntimeException(
"Failed to find a storage pool with enough capacity to move the volume to.");
}
Volume newVol = migrateVolume(volume, destPool);
return volFactory.getVolume(newVol.getId());
}
/*
* Upload the volume to secondary storage.
*/
@Override
@DB
@ActionEvent(eventType = EventTypes.EVENT_VOLUME_UPLOAD, eventDescription = "uploading volume", async = true)
public VolumeVO uploadVolume(UploadVolumeCmd cmd)
throws ResourceAllocationException {
Account caller = UserContext.current().getCaller();
long ownerId = cmd.getEntityOwnerId();
Account owner = _accountDao.findById(ownerId);
Long zoneId = cmd.getZoneId();
String volumeName = cmd.getVolumeName();
String url = cmd.getUrl();
String format = cmd.getFormat();
String imageStoreUuid = cmd.getImageStoreUuid();
DataStore store = _tmpltMgr.getImageStore(imageStoreUuid, zoneId);
validateVolume(caller, ownerId, zoneId, volumeName, url, format);
VolumeVO volume = persistVolume(owner, zoneId, volumeName,
url, cmd.getFormat());
VolumeInfo vol = volFactory.getVolume(volume.getId());
RegisterVolumePayload payload = new RegisterVolumePayload(cmd.getUrl(), cmd.getChecksum(),
cmd.getFormat());
vol.addPayload(payload);
volService.registerVolume(vol, store);
return volume;
}
private boolean validateVolume(Account caller, long ownerId, Long zoneId,
String volumeName, String url, String format)
throws ResourceAllocationException {
// permission check
_accountMgr.checkAccess(caller, null, true,
_accountMgr.getActiveAccountById(ownerId));
// Check that the resource limit for volumes won't be exceeded
_resourceLimitMgr.checkResourceLimit(_accountMgr.getAccount(ownerId),
ResourceType.volume);
// Verify that zone exists
DataCenterVO zone = _dcDao.findById(zoneId);
if (zone == null) {
throw new InvalidParameterValueException(
"Unable to find zone by id " + zoneId);
}
// Check if zone is disabled
if (Grouping.AllocationState.Disabled == zone.getAllocationState()
&& !_accountMgr.isRootAdmin(caller.getType())) {
throw new PermissionDeniedException(
"Cannot perform this operation, Zone is currently disabled: "
+ zoneId);
}
if (url.toLowerCase().contains("file://")) {
throw new InvalidParameterValueException(
"File:// type urls are currently unsupported");
}
ImageFormat imgfmt = ImageFormat.valueOf(format.toUpperCase());
if (imgfmt == null) {
throw new IllegalArgumentException("Image format is incorrect "
+ format + ". Supported formats are "
+ EnumUtils.listValues(ImageFormat.values()));
}
String userSpecifiedName = volumeName;
if (userSpecifiedName == null) {
userSpecifiedName = getRandomVolumeName();
}
if ((!url.toLowerCase().endsWith("vhd"))
&& (!url.toLowerCase().endsWith("vhd.zip"))
&& (!url.toLowerCase().endsWith("vhd.bz2"))
&& (!url.toLowerCase().endsWith("vhd.gz"))
&& (!url.toLowerCase().endsWith("qcow2"))
&& (!url.toLowerCase().endsWith("qcow2.zip"))
&& (!url.toLowerCase().endsWith("qcow2.bz2"))
&& (!url.toLowerCase().endsWith("qcow2.gz"))
&& (!url.toLowerCase().endsWith("ova"))
&& (!url.toLowerCase().endsWith("ova.zip"))
&& (!url.toLowerCase().endsWith("ova.bz2"))
&& (!url.toLowerCase().endsWith("ova.gz"))
&& (!url.toLowerCase().endsWith("img"))
&& (!url.toLowerCase().endsWith("raw"))) {
throw new InvalidParameterValueException("Please specify a valid "
+ format.toLowerCase());
}
if ((format.equalsIgnoreCase("vhd") && (!url.toLowerCase().endsWith(
".vhd")
&& !url.toLowerCase().endsWith("vhd.zip")
&& !url.toLowerCase().endsWith("vhd.bz2") && !url.toLowerCase()
.endsWith("vhd.gz")))
|| (format.equalsIgnoreCase("qcow2") && (!url.toLowerCase()
.endsWith(".qcow2")
&& !url.toLowerCase().endsWith("qcow2.zip")
&& !url.toLowerCase().endsWith("qcow2.bz2") && !url
.toLowerCase().endsWith("qcow2.gz")))
|| (format.equalsIgnoreCase("ova") && (!url.toLowerCase()
.endsWith(".ova")
&& !url.toLowerCase().endsWith("ova.zip")
&& !url.toLowerCase().endsWith("ova.bz2") && !url
.toLowerCase().endsWith("ova.gz")))
|| (format.equalsIgnoreCase("raw") && (!url.toLowerCase()
.endsWith(".img") && !url.toLowerCase().endsWith("raw")))) {
throw new InvalidParameterValueException(
"Please specify a valid URL. URL:" + url
+ " is an invalid for the format "
+ format.toLowerCase());
}
UriUtils.validateUrl(url);
// Check that the resource limit for secondary storage won't be exceeded
_resourceLimitMgr.checkResourceLimit(_accountMgr.getAccount(ownerId), ResourceType.secondary_storage,
UriUtils.getRemoteSize(url));
return false;
}
@Override
public VolumeVO allocateDuplicateVolume(VolumeVO oldVol, Long templateId) {
VolumeVO newVol = new VolumeVO(oldVol.getVolumeType(),
oldVol.getName(), oldVol.getDataCenterId(),
oldVol.getDomainId(), oldVol.getAccountId(),
oldVol.getDiskOfferingId(), oldVol.getSize(),
oldVol.getMinIops(), oldVol.getMaxIops(), oldVol.get_iScsiName());
if (templateId != null) {
newVol.setTemplateId(templateId);
} else {
newVol.setTemplateId(oldVol.getTemplateId());
}
newVol.setDeviceId(oldVol.getDeviceId());
newVol.setInstanceId(oldVol.getInstanceId());
newVol.setRecreatable(oldVol.isRecreatable());
newVol.setFormat(oldVol.getFormat());
return _volsDao.persist(newVol);
}
@DB
protected VolumeInfo createVolumeFromSnapshot(VolumeVO volume,
SnapshotVO snapshot) throws StorageUnavailableException {
Account account = _accountDao.findById(volume.getAccountId());
final HashSet<StoragePool> poolsToAvoid = new HashSet<StoragePool>();
StoragePool pool = null;
Set<Long> podsToAvoid = new HashSet<Long>();
Pair<HostPodVO, Long> pod = null;
DiskOfferingVO diskOffering = _diskOfferingDao
.findByIdIncludingRemoved(volume.getDiskOfferingId());
DataCenterVO dc = _dcDao.findById(volume.getDataCenterId());
DiskProfile dskCh = new DiskProfile(volume, diskOffering,
snapshot.getHypervisorType());
// Determine what pod to store the volume in
while ((pod = _resourceMgr.findPod(null, null, dc, account.getId(),
podsToAvoid)) != null) {
podsToAvoid.add(pod.first().getId());
// Determine what storage pool to store the volume in
while ((pool = storageMgr.findStoragePool(dskCh, dc, pod.first(), null, null,
null, poolsToAvoid)) != null) {
break;
}
}
if (pool == null) {
String msg = "There are no available storage pools to store the volume in";
s_logger.info(msg);
throw new StorageUnavailableException(msg, -1);
}
VolumeInfo vol = volFactory.getVolume(volume.getId());
DataStore store = dataStoreMgr.getDataStore(pool.getId(), DataStoreRole.Primary);
SnapshotInfo snapInfo = snapshotFactory.getSnapshot(snapshot.getId(), DataStoreRole.Image);
AsyncCallFuture<VolumeApiResult> future = volService.createVolumeFromSnapshot(vol, store, snapInfo);
try {
VolumeApiResult result = future.get();
if (result.isFailed()) {
s_logger.debug("Failed to create volume from snapshot:" + result.getResult());
throw new CloudRuntimeException("Failed to create volume from snapshot:" + result.getResult());
}
return result.getVolume();
} catch (InterruptedException e) {
s_logger.debug("Failed to create volume from snapshot", e);
throw new CloudRuntimeException("Failed to create volume from snapshot", e);
} catch (ExecutionException e) {
s_logger.debug("Failed to create volume from snapshot", e);
throw new CloudRuntimeException("Failed to create volume from snapshot", e);
}
}
protected DiskProfile createDiskCharacteristics(VolumeInfo volume,
VMTemplateVO template, DataCenterVO dc, DiskOfferingVO diskOffering) {
if (volume.getVolumeType() == Type.ROOT
&& Storage.ImageFormat.ISO != template.getFormat()) {
TemplateDataStoreVO ss = _vmTemplateStoreDao.findByTemplateZoneDownloadStatus(template.getId(), dc.getId(),
VMTemplateStorageResourceAssoc.Status.DOWNLOADED);
if (ss == null) {
throw new CloudRuntimeException("Template "
+ template.getName()
+ " has not been completely downloaded to zone "
+ dc.getId());
}
return new DiskProfile(volume.getId(), volume.getVolumeType(),
volume.getName(), diskOffering.getId(), ss.getSize(),
diskOffering.getTagsArray(),
diskOffering.getUseLocalStorage(),
diskOffering.isRecreatable(),
Storage.ImageFormat.ISO != template.getFormat() ? template
.getId() : null);
} else {
return new DiskProfile(volume.getId(), volume.getVolumeType(),
volume.getName(), diskOffering.getId(),
diskOffering.getDiskSize(), diskOffering.getTagsArray(),
diskOffering.getUseLocalStorage(),
diskOffering.isRecreatable(), null);
}
}
protected VolumeVO createVolumeFromSnapshot(VolumeVO volume, long snapshotId) throws StorageUnavailableException {
VolumeInfo createdVolume = null;
SnapshotVO snapshot = _snapshotDao.findById(snapshotId);
createdVolume = createVolumeFromSnapshot(volume,
snapshot);
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, createdVolume.getAccountId(), createdVolume.getDataCenterId(), createdVolume.getId(),
createdVolume.getName(), createdVolume.getDiskOfferingId(), null, createdVolume.getSize(), Volume.class.getName(), createdVolume.getUuid());
return _volsDao.findById(createdVolume.getId());
}
@DB
public VolumeInfo copyVolumeFromSecToPrimary(VolumeInfo volume,
VMInstanceVO vm, VMTemplateVO template, DataCenterVO dc,
HostPodVO pod, Long clusterId, ServiceOfferingVO offering,
DiskOfferingVO diskOffering, List<StoragePool> avoids,
long size, HypervisorType hyperType) throws NoTransitionException {
final HashSet<StoragePool> avoidPools = new HashSet<StoragePool>(
avoids);
DiskProfile dskCh = createDiskCharacteristics(volume, template, dc,
diskOffering);
dskCh.setHyperType(vm.getHypervisorType());
// Find a suitable storage to create volume on
StoragePool destPool = storageMgr.findStoragePool(dskCh, dc, pod,
clusterId, null, vm, avoidPools);
DataStore destStore = dataStoreMgr.getDataStore(destPool.getId(), DataStoreRole.Primary);
AsyncCallFuture<VolumeApiResult> future = volService.copyVolume(volume, destStore);
try {
VolumeApiResult result = future.get();
if (result.isFailed()) {
s_logger.debug("copy volume failed: " + result.getResult());
throw new CloudRuntimeException("copy volume failed: " + result.getResult());
}
return result.getVolume();
} catch (InterruptedException e) {
s_logger.debug("Failed to copy volume: " + volume.getId(), e);
throw new CloudRuntimeException("Failed to copy volume", e);
} catch (ExecutionException e) {
s_logger.debug("Failed to copy volume: " + volume.getId(), e);
throw new CloudRuntimeException("Failed to copy volume", e);
}
}
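    /**
     * Creates the volume on primary storage: selects a matching storage pool and
     * either creates an empty volume or creates it from the template, waiting
     * synchronously for the asynchronous result.
     */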
@DB
public VolumeInfo createVolume(VolumeInfo volume, VMInstanceVO vm,
VMTemplateVO template, DataCenterVO dc, HostPodVO pod,
Long clusterId, ServiceOfferingVO offering,
DiskOfferingVO diskOffering, List<StoragePool> avoids,
long size, HypervisorType hyperType) {
StoragePool pool = null;
if (diskOffering != null && diskOffering.isCustomized()) {
diskOffering.setDiskSize(size);
}
DiskProfile dskCh = null;
if (volume.getVolumeType() == Type.ROOT
&& Storage.ImageFormat.ISO != template.getFormat()) {
dskCh = createDiskCharacteristics(volume, template, dc, offering);
} else {
dskCh = createDiskCharacteristics(volume, template, dc,
diskOffering);
}
dskCh.setHyperType(hyperType);
final HashSet<StoragePool> avoidPools = new HashSet<StoragePool>(
avoids);
pool = storageMgr.findStoragePool(dskCh, dc, pod, clusterId, vm.getHostId(),
vm, avoidPools);
if (pool == null) {
s_logger.warn("Unable to find storage pool when create volume "
+ volume.getName());
throw new CloudRuntimeException("Unable to find storage pool when create volume" + volume.getName());
}
if (s_logger.isDebugEnabled()) {
s_logger.debug("Trying to create " + volume + " on " + pool);
}
DataStore store = dataStoreMgr.getDataStore(pool.getId(), DataStoreRole.Primary);
AsyncCallFuture<VolumeApiResult> future = null;
        boolean isNotCreatedFromTemplate = volume.getTemplateId() == null;
if (isNotCreatedFromTemplate) {
future = volService.createVolumeAsync(volume, store);
} else {
TemplateInfo templ = tmplFactory.getTemplate(template.getId(), DataStoreRole.Image);
future = volService.createVolumeFromTemplateAsync(volume, store.getId(), templ);
}
try {
VolumeApiResult result = future.get();
if (result.isFailed()) {
s_logger.debug("create volume failed: " + result.getResult());
throw new CloudRuntimeException("create volume failed:" + result.getResult());
}
return result.getVolume();
} catch (InterruptedException e) {
s_logger.error("create volume failed", e);
throw new CloudRuntimeException("create volume failed", e);
} catch (ExecutionException e) {
s_logger.error("create volume failed", e);
throw new CloudRuntimeException("create volume failed", e);
}
}
public String getRandomVolumeName() {
return UUID.randomUUID().toString();
}
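    /**
     * Persists a DATADISK volume record for a volume being uploaded from a URL and
     * increments the owner's volume and secondary storage resource counts.
     */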
private VolumeVO persistVolume(Account owner, Long zoneId,
String volumeName, String url, String format) {
Transaction txn = Transaction.currentTxn();
txn.start();
VolumeVO volume = new VolumeVO(volumeName, zoneId, -1, -1, -1,
new Long(-1), null, null, 0, Volume.Type.DATADISK);
volume.setPoolId(null);
volume.setDataCenterId(zoneId);
volume.setPodId(null);
volume.setAccountId(owner.getAccountId());
volume.setDomainId(owner.getDomainId());
long diskOfferingId = _diskOfferingDao.findByUniqueName(
"Cloud.com-Custom").getId();
volume.setDiskOfferingId(diskOfferingId);
// volume.setSize(size);
volume.setInstanceId(null);
volume.setUpdated(new Date());
volume.setDomainId((owner == null) ? Domain.ROOT_DOMAIN : owner
.getDomainId());
volume.setFormat(ImageFormat.valueOf(format));
volume = _volsDao.persist(volume);
UserContext.current().setEventDetails("Volume Id: " + volume.getId());
// Increment resource count during allocation; if actual creation fails,
// decrement it
_resourceLimitMgr.incrementResourceCount(volume.getAccountId(),
ResourceType.volume);
_resourceLimitMgr.incrementResourceCount(volume.getAccountId(), ResourceType.secondary_storage,
UriUtils.getRemoteSize(url));
txn.commit();
return volume;
}
@Override
public boolean volumeOnSharedStoragePool(VolumeVO volume) {
Long poolId = volume.getPoolId();
if (poolId == null) {
return false;
} else {
StoragePoolVO pool = _storagePoolDao.findById(poolId);
if (pool == null) {
return false;
} else {
                return pool.getScope() != ScopeType.HOST;
}
}
}
@Override
public boolean volumeInactive(Volume volume) {
Long vmId = volume.getInstanceId();
if (vmId != null) {
UserVm vm = _userVmDao.findById(vmId);
if (vm == null) {
return true;
}
State state = vm.getState();
if (state.equals(State.Stopped) || state.equals(State.Destroyed)) {
return true;
}
}
return false;
}
@Override
public String getVmNameOnVolume(Volume volume) {
Long vmId = volume.getInstanceId();
if (vmId != null) {
VMInstanceVO vm = _vmInstanceDao.findById(vmId);
if (vm == null) {
return null;
}
return vm.getInstanceName();
}
return null;
}
/*
* Just allocate a volume in the database, don't send the createvolume cmd
* to hypervisor. The volume will be finally created only when it's attached
* to a VM.
*/
@Override
@DB
@ActionEvent(eventType = EventTypes.EVENT_VOLUME_CREATE, eventDescription = "creating volume", create = true)
public VolumeVO allocVolume(CreateVolumeCmd cmd)
throws ResourceAllocationException {
// FIXME: some of the scheduled event stuff might be missing here...
Account caller = UserContext.current().getCaller();
long ownerId = cmd.getEntityOwnerId();
Boolean displayVolumeEnabled = cmd.getDisplayVolume();
// permission check
_accountMgr.checkAccess(caller, null, true,
_accountMgr.getActiveAccountById(ownerId));
// Check that the resource limit for volumes won't be exceeded
_resourceLimitMgr.checkResourceLimit(_accountMgr.getAccount(ownerId),
ResourceType.volume);
Long zoneId = cmd.getZoneId();
Long diskOfferingId = null;
DiskOfferingVO diskOffering = null;
Long size = null;
Long minIops = null;
Long maxIops = null;
// Volume VO used for extracting the source template id
VolumeVO parentVolume = null;
// validate input parameters before creating the volume
if ((cmd.getSnapshotId() == null && cmd.getDiskOfferingId() == null)
|| (cmd.getSnapshotId() != null && cmd.getDiskOfferingId() != null)) {
            throw new InvalidParameterValueException(
                    "Either a disk offering id or a snapshot id must be passed when creating a volume");
}
if (cmd.getSnapshotId() == null) {// create a new volume
diskOfferingId = cmd.getDiskOfferingId();
size = cmd.getSize();
Long sizeInGB = size;
if (size != null) {
if (size > 0) {
size = size * 1024 * 1024 * 1024; // user specify size in GB
} else {
throw new InvalidParameterValueException(
"Disk size must be larger than 0");
}
}
            // Check that the disk offering is specified
diskOffering = _diskOfferingDao.findById(diskOfferingId);
if ((diskOffering == null) || diskOffering.getRemoved() != null
|| !DiskOfferingVO.Type.Disk.equals(diskOffering.getType())) {
throw new InvalidParameterValueException(
"Please specify a valid disk offering.");
}
if (diskOffering.isCustomized()) {
if (size == null) {
throw new InvalidParameterValueException(
"This disk offering requires a custom size specified");
}
if ((sizeInGB < _customDiskOfferingMinSize)
|| (sizeInGB > _customDiskOfferingMaxSize)) {
throw new InvalidParameterValueException("Volume size: "
+ sizeInGB + "GB is out of allowed range. Max: "
+ _customDiskOfferingMaxSize + " Min:"
+ _customDiskOfferingMinSize);
}
}
if (!diskOffering.isCustomized() && size != null) {
throw new InvalidParameterValueException(
"This disk offering does not allow custom size");
}
if (diskOffering.getDomainId() == null) {
// do nothing as offering is public
} else {
_configMgr.checkDiskOfferingAccess(caller, diskOffering);
}
if (diskOffering.getDiskSize() > 0) {
size = diskOffering.getDiskSize();
}
Boolean isCustomizedIops = diskOffering.isCustomizedIops();
if (isCustomizedIops != null) {
if (isCustomizedIops) {
minIops = cmd.getMinIops();
maxIops = cmd.getMaxIops();
if (minIops == null && maxIops == null) {
minIops = 0L;
maxIops = 0L;
}
else {
if (minIops == null || minIops <= 0) {
throw new InvalidParameterValueException("The min IOPS must be greater than 0.");
}
if (maxIops == null) {
maxIops = 0L;
}
if (minIops > maxIops) {
throw new InvalidParameterValueException("The min IOPS must be less than or equal to the max IOPS.");
}
}
}
else {
minIops = diskOffering.getMinIops();
maxIops = diskOffering.getMaxIops();
}
}
            if (!validateVolumeSizeRange(size)) {
                // size is in bytes at this point
                throw new InvalidParameterValueException(
                        "Invalid size for custom volume creation: " + size
                                + ", max volume size is: " + _maxVolumeSizeInGb + " GB");
}
} else { // create volume from snapshot
Long snapshotId = cmd.getSnapshotId();
SnapshotVO snapshotCheck = _snapshotDao.findById(snapshotId);
if (snapshotCheck == null) {
throw new InvalidParameterValueException(
"unable to find a snapshot with id " + snapshotId);
}
if (snapshotCheck.getState() != Snapshot.State.BackedUp) {
throw new InvalidParameterValueException("Snapshot id="
+ snapshotId + " is not in " + Snapshot.State.BackedUp
+ " state yet and can't be used for volume creation");
}
parentVolume = _volsDao.findByIdIncludingRemoved(snapshotCheck.getVolumeId());
diskOfferingId = snapshotCheck.getDiskOfferingId();
diskOffering = _diskOfferingDao.findById(diskOfferingId);
zoneId = snapshotCheck.getDataCenterId();
            size = snapshotCheck.getSize(); // the disk offering is only used for its tags
// check snapshot permissions
_accountMgr.checkAccess(caller, null, true, snapshotCheck);
}
if(displayVolumeEnabled == null){
displayVolumeEnabled = true;
} else{
if(!_accountMgr.isRootAdmin(caller.getType())){
throw new PermissionDeniedException( "Cannot update parameter displayvolume, only admin permitted ");
}
}
// Check that the resource limit for primary storage won't be exceeded
_resourceLimitMgr.checkResourceLimit(_accountMgr.getAccount(ownerId), ResourceType.primary_storage,
new Long(size));
// Verify that zone exists
DataCenterVO zone = _dcDao.findById(zoneId);
if (zone == null) {
throw new InvalidParameterValueException(
"Unable to find zone by id " + zoneId);
}
// Check if zone is disabled
if (Grouping.AllocationState.Disabled == zone.getAllocationState()
&& !_accountMgr.isRootAdmin(caller.getType())) {
throw new PermissionDeniedException(
"Cannot perform this operation, Zone is currently disabled: "
+ zoneId);
}
// If local storage is disabled then creation of volume with local disk
// offering not allowed
if (!zone.isLocalStorageEnabled() && diskOffering.getUseLocalStorage()) {
throw new InvalidParameterValueException(
"Zone is not configured to use local storage but volume's disk offering "
+ diskOffering.getName() + " uses it");
}
String userSpecifiedName = cmd.getVolumeName();
if (userSpecifiedName == null) {
userSpecifiedName = getRandomVolumeName();
}
Transaction txn = Transaction.currentTxn();
txn.start();
VolumeVO volume = new VolumeVO(userSpecifiedName, -1, -1, -1, -1,
new Long(-1), null, null, 0, Volume.Type.DATADISK);
volume.setPoolId(null);
volume.setDataCenterId(zoneId);
volume.setPodId(null);
volume.setAccountId(ownerId);
volume.setDomainId(((caller == null) ? Domain.ROOT_DOMAIN : caller
.getDomainId()));
volume.setDiskOfferingId(diskOfferingId);
volume.setSize(size);
volume.setMinIops(minIops);
volume.setMaxIops(maxIops);
volume.setInstanceId(null);
volume.setUpdated(new Date());
volume.setDomainId((caller == null) ? Domain.ROOT_DOMAIN : caller
.getDomainId());
volume.setDisplayVolume(displayVolumeEnabled);
if (parentVolume != null) {
volume.setTemplateId(parentVolume.getTemplateId());
volume.setFormat(parentVolume.getFormat());
} else {
volume.setTemplateId(null);
}
volume = _volsDao.persist(volume);
if (cmd.getSnapshotId() == null) {
            // for a volume created from a snapshot, the usage event is published only after the volume is actually created
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(), diskOfferingId,
null, size, Volume.class.getName(), volume.getUuid());
}
UserContext.current().setEventDetails("Volume Id: " + volume.getId());
// Increment resource count during allocation; if actual creation fails,
// decrement it
_resourceLimitMgr.incrementResourceCount(volume.getAccountId(),
ResourceType.volume);
_resourceLimitMgr.incrementResourceCount(volume.getAccountId(), ResourceType.primary_storage,
new Long(volume.getSize()));
txn.commit();
return volume;
}
@Override
@DB
@ActionEvent(eventType = EventTypes.EVENT_VOLUME_CREATE, eventDescription = "creating volume", async = true)
public VolumeVO createVolume(CreateVolumeCmd cmd) {
VolumeVO volume = _volsDao.findById(cmd.getEntityId());
boolean created = true;
try {
if (cmd.getSnapshotId() != null) {
volume = createVolumeFromSnapshot(volume, cmd.getSnapshotId());
if (volume.getState() != Volume.State.Ready) {
created = false;
}
}
return volume;
} catch(Exception e) {
created = false;
s_logger.debug("Failed to create volume: " + volume.getId(), e);
return null;
} finally {
if (!created) {
s_logger.trace("Decrementing volume resource count for account id="
+ volume.getAccountId()
+ " as volume failed to create on the backend");
_resourceLimitMgr.decrementResourceCount(volume.getAccountId(),
ResourceType.volume);
_resourceLimitMgr.decrementResourceCount(volume.getAccountId(), ResourceType.primary_storage,
new Long(volume.getSize()));
}
}
}
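    /**
     * Resizes a DATADISK volume: validates hypervisor support, volume state and the
     * new size (taken from an explicit size or a new disk offering), requires the
     * shrinkok flag when shrinking, performs the resize through the volume service
     * and adjusts the primary storage resource count.
     */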
@Override
@DB
@ActionEvent(eventType = EventTypes.EVENT_VOLUME_RESIZE, eventDescription = "resizing volume", async = true)
public VolumeVO resizeVolume(ResizeVolumeCmd cmd)
throws ResourceAllocationException {
Long newSize = null;
boolean shrinkOk = cmd.getShrinkOk();
VolumeVO volume = _volsDao.findById(cmd.getEntityId());
if (volume == null) {
throw new InvalidParameterValueException("No such volume");
}
DiskOfferingVO diskOffering = _diskOfferingDao.findById(volume
.getDiskOfferingId());
DiskOfferingVO newDiskOffering = null;
newDiskOffering = _diskOfferingDao.findById(cmd.getNewDiskOfferingId());
/*
* Volumes with no hypervisor have never been assigned, and can be
* resized by recreating. perhaps in the future we can just update the
* db entry for the volume
*/
if (_volsDao.getHypervisorType(volume.getId()) == HypervisorType.None) {
throw new InvalidParameterValueException(
"Can't resize a volume that has never been attached, not sure which hypervisor type. Recreate volume to resize.");
}
/* Only works for KVM/Xen for now */
if (_volsDao.getHypervisorType(volume.getId()) != HypervisorType.KVM
&& _volsDao.getHypervisorType(volume.getId()) != HypervisorType.XenServer
&& _volsDao.getHypervisorType(volume.getId()) != HypervisorType.VMware) {
            throw new InvalidParameterValueException(
                    "CloudStack currently only supports volumes on KVM, XenServer or VMware hypervisors for resize");
}
if (volume.getState() != Volume.State.Ready) {
throw new InvalidParameterValueException(
"Volume should be in ready state before attempting a resize");
}
if (!volume.getVolumeType().equals(Volume.Type.DATADISK)) {
throw new InvalidParameterValueException(
"Can only resize DATA volumes");
}
/*
* figure out whether or not a new disk offering or size parameter is
* required, get the correct size value
*/
if (newDiskOffering == null) {
if (diskOffering.isCustomized()) {
newSize = cmd.getSize();
if (newSize == null) {
throw new InvalidParameterValueException(
"new offering is of custom size, need to specify a size");
}
newSize = (newSize << 30);
} else {
throw new InvalidParameterValueException("current offering"
+ volume.getDiskOfferingId()
+ " cannot be resized, need to specify a disk offering");
}
} else {
if (newDiskOffering.getRemoved() != null
|| !DiskOfferingVO.Type.Disk.equals(newDiskOffering
.getType())) {
throw new InvalidParameterValueException(
"Disk offering ID is missing or invalid");
}
if (diskOffering.getTags() != null) {
if (!newDiskOffering.getTags().equals(diskOffering.getTags())) {
throw new InvalidParameterValueException(
"Tags on new and old disk offerings must match");
}
} else if (newDiskOffering.getTags() != null) {
throw new InvalidParameterValueException(
"There are no tags on current disk offering, new disk offering needs to have no tags");
}
if (newDiskOffering.getDomainId() == null) {
// do nothing as offering is public
} else {
_configMgr.checkDiskOfferingAccess(UserContext.current()
.getCaller(), newDiskOffering);
}
if (newDiskOffering.isCustomized()) {
newSize = cmd.getSize();
if (newSize == null) {
throw new InvalidParameterValueException(
"new offering is of custom size, need to specify a size");
}
newSize = (newSize << 30);
} else {
newSize = newDiskOffering.getDiskSize();
}
}
if (newSize == null) {
throw new InvalidParameterValueException(
"could not detect a size parameter or fetch one from the diskofferingid parameter");
}
if (!validateVolumeSizeRange(newSize)) {
throw new InvalidParameterValueException(
"Requested size out of range");
}
/* does the caller have the authority to act on this volume? */
_accountMgr.checkAccess(UserContext.current().getCaller(), null, true,
volume);
UserVmVO userVm = _userVmDao.findById(volume.getInstanceId());
long currentSize = volume.getSize();
/*
* lets make certain they (think they) know what they're doing if they
* want to shrink, by forcing them to provide the shrinkok parameter.
* This will be checked again at the hypervisor level where we can see
* the actual disk size
*/
if (currentSize > newSize && !shrinkOk) {
throw new InvalidParameterValueException(
"Going from existing size of "
+ currentSize
+ " to size of "
+ newSize
+ " would shrink the volume, need to sign off by supplying the shrinkok parameter with value of true");
}
if (!shrinkOk) {
/* Check resource limit for this account on primary storage resource */
_resourceLimitMgr.checkResourceLimit(_accountMgr.getAccount(volume.getAccountId()),
ResourceType.primary_storage, new Long(newSize - currentSize));
}
/*
* get a list of hosts to send the commands to, try the system the
* associated vm is running on first, then the last known place it ran.
* If not attached to a userVm, we pass 'none' and resizevolume.sh is ok
* with that since it only needs the vm name to live resize
*/
long[] hosts = null;
String instanceName = "none";
if (userVm != null) {
instanceName = userVm.getInstanceName();
if (userVm.getHostId() != null) {
hosts = new long[] { userVm.getHostId() };
} else if (userVm.getLastHostId() != null) {
hosts = new long[] { userVm.getLastHostId() };
}
/* Xen only works offline, SR does not support VDI.resizeOnline */
if (_volsDao.getHypervisorType(volume.getId()) == HypervisorType.XenServer
&& !userVm.getState().equals(State.Stopped)) {
throw new InvalidParameterValueException(
"VM must be stopped or disk detached in order to resize with the Xen HV");
}
}
ResizeVolumePayload payload = new ResizeVolumePayload(newSize, shrinkOk, instanceName, hosts);
try {
VolumeInfo vol = volFactory.getVolume(volume.getId());
vol.addPayload(payload);
AsyncCallFuture<VolumeApiResult> future = volService.resize(vol);
VolumeApiResult result = future.get();
if (result.isFailed()) {
s_logger.warn("Failed to resize the volume " + volume);
return null;
}
volume = _volsDao.findById(volume.getId());
if (newDiskOffering != null) {
volume.setDiskOfferingId(cmd.getNewDiskOfferingId());
}
_volsDao.update(volume.getId(), volume);
// Log usage event for volumes belonging user VM's only
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_RESIZE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(),
volume.getDiskOfferingId(), volume.getTemplateId(), volume.getSize(), Volume.class.getName(), volume.getUuid());
/* Update resource count for the account on primary storage resource */
if (!shrinkOk) {
_resourceLimitMgr.incrementResourceCount(volume.getAccountId(), ResourceType.primary_storage,
new Long(newSize - currentSize));
} else {
_resourceLimitMgr.decrementResourceCount(volume.getAccountId(), ResourceType.primary_storage,
new Long(currentSize - newSize));
}
return volume;
        } catch (InterruptedException e) {
            s_logger.warn("Failed to get volume resize result", e);
        } catch (ExecutionException e) {
            s_logger.warn("Failed to get volume resize result", e);
        } catch (Exception e) {
            s_logger.warn("Failed to get volume resize result", e);
        }
return null;
}
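    /**
     * Deletes a detached volume: destroys it, adjusts resource counts and usage
     * events for user VM volumes, and expunges it from primary and/or secondary
     * storage wherever it exists.
     */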
@Override
@DB
@ActionEvent(eventType = EventTypes.EVENT_VOLUME_DELETE, eventDescription = "deleting volume")
public boolean deleteVolume(long volumeId, Account caller)
throws ConcurrentOperationException {
VolumeVO volume = _volsDao.findById(volumeId);
if (volume == null) {
            throw new InvalidParameterValueException(
                    "Unable to acquire volume with ID: " + volumeId);
}
if (!_snapshotMgr.canOperateOnVolume(volume)) {
            throw new InvalidParameterValueException(
                    "There are snapshots being created on this volume, unable to delete it");
}
_accountMgr.checkAccess(caller, null, true, volume);
if (volume.getInstanceId() != null) {
throw new InvalidParameterValueException(
"Please specify a volume that is not attached to any VM.");
}
if (volume.getState() == Volume.State.UploadOp) {
VolumeDataStoreVO volumeStore = _volumeStoreDao.findByVolume(volume
.getId());
if (volumeStore.getDownloadState() == VMTemplateStorageResourceAssoc.Status.DOWNLOAD_IN_PROGRESS) {
throw new InvalidParameterValueException(
"Please specify a volume that is not uploading");
}
}
try {
            if (volume.getState() != Volume.State.Destroy && volume.getState() != Volume.State.Expunging
                    && volume.getState() != Volume.State.Expunged) {
Long instanceId = volume.getInstanceId();
if (!volService.destroyVolume(volume.getId())) {
return false;
}
VMInstanceVO vmInstance = _vmInstanceDao.findById(instanceId);
if (instanceId == null
|| (vmInstance.getType().equals(VirtualMachine.Type.User))) {
// Decrement the resource count for volumes and primary storage belonging user VM's only
_resourceLimitMgr.decrementResourceCount(volume.getAccountId(),
ResourceType.volume);
/* If volume is in primary storage, decrement primary storage count else decrement secondary
storage count (in case of upload volume). */
if (volume.getFolder() != null || volume.getPath() != null || volume.getState() == Volume.State.Allocated) {
_resourceLimitMgr.decrementResourceCount(volume.getAccountId(), ResourceType.primary_storage,
new Long(volume.getSize()));
} else {
_resourceLimitMgr.recalculateResourceCount(volume.getAccountId(), volume.getDomainId(),
ResourceType.secondary_storage.getOrdinal());
}
// Log usage event for volumes belonging user VM's only
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_DELETE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(),
Volume.class.getName(), volume.getUuid());
}
}
// Mark volume as removed if volume has not been created on primary or secondary
if (volume.getState() == Volume.State.Allocated) {
_volsDao.remove(volumeId);
stateTransitTo(volume, Volume.Event.DestroyRequested);
return true;
}
// expunge volume from primary if volume is on primary
VolumeInfo volOnPrimary = volFactory.getVolume(volume.getId(), DataStoreRole.Primary);
if (volOnPrimary != null) {
s_logger.info("Expunging volume " + volume.getId() + " from primary data store");
AsyncCallFuture<VolumeApiResult> future = volService.expungeVolumeAsync(volOnPrimary);
future.get();
}
// expunge volume from secondary if volume is on image store
VolumeInfo volOnSecondary = volFactory.getVolume(volume.getId(), DataStoreRole.Image);
if (volOnSecondary != null) {
s_logger.info("Expunging volume " + volume.getId() + " from secondary data store");
AsyncCallFuture<VolumeApiResult> future2 = volService.expungeVolumeAsync(volOnSecondary);
future2.get();
}
} catch (Exception e) {
s_logger.warn("Failed to expunge volume:", e);
return false;
}
return true;
}
@Override
public boolean validateVolumeSizeRange(long size) {
        if (size < 0 || (size > 0 && size < (1024 * 1024 * 1024))) {
            throw new InvalidParameterValueException(
                    "Please specify a size of at least 1 GB.");
        } else if (size > (_maxVolumeSizeInGb * 1024 * 1024 * 1024)) {
            throw new InvalidParameterValueException("Requested volume size of " + size
                    + " bytes exceeds the maximum allowed size of " + _maxVolumeSizeInGb
                    + " GB.");
}
return true;
}
protected DiskProfile toDiskProfile(VolumeVO vol, DiskOfferingVO offering) {
return new DiskProfile(vol.getId(), vol.getVolumeType(), vol.getName(),
offering.getId(), vol.getSize(), offering.getTagsArray(),
offering.getUseLocalStorage(), offering.isRecreatable(),
vol.getTemplateId());
}
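    /**
     * Allocates a raw (non-templated) volume record in the database and returns its
     * DiskProfile; nothing is created on the storage backend yet.
     */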
@Override
public DiskProfile allocateRawVolume(Type type,
String name, DiskOfferingVO offering, Long size, VMInstanceVO vm, VMTemplateVO template, Account owner) {
if (size == null) {
size = offering.getDiskSize();
} else {
size = (size * 1024 * 1024 * 1024);
}
VolumeVO vol = new VolumeVO(type, name, vm.getDataCenterId(),
owner.getDomainId(), owner.getId(), offering.getId(), size,
offering.getMinIops(), offering.getMaxIops(), null);
if (vm != null) {
vol.setInstanceId(vm.getId());
}
        if (type.equals(Type.ROOT)) {
            vol.setDeviceId(0L);
        } else {
            vol.setDeviceId(1L);
        }
if (template.getFormat() == ImageFormat.ISO) {
vol.setIsoId(template.getId());
}
vol.setFormat(getSupportedImageFormatForCluster(vm.getHypervisorType()));
vol = _volsDao.persist(vol);
// Save usage event and update resource count for user vm volumes
if (vm instanceof UserVm) {
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, vol.getAccountId(), vol.getDataCenterId(), vol.getId(), vol.getName(), offering.getId(), null, size,
Volume.class.getName(), vol.getUuid());
_resourceLimitMgr.incrementResourceCount(vm.getAccountId(),
ResourceType.volume);
_resourceLimitMgr.incrementResourceCount(vm.getAccountId(), ResourceType.primary_storage,
new Long(vol.getSize()));
}
return toDiskProfile(vol, offering);
}
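    /**
     * Allocates a template-backed volume record sized from the template, marks ROOT
     * volumes of system VMs as recreatable, and returns its DiskProfile.
     */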
@Override
public DiskProfile allocateTemplatedVolume(
Type type, String name, DiskOfferingVO offering,
VMTemplateVO template, VMInstanceVO vm, Account owner) {
assert (template.getFormat() != ImageFormat.ISO) : "ISO is not a template really....";
Long size = _tmpltMgr.getTemplateSize(template.getId(), vm.getDataCenterId());
VolumeVO vol = new VolumeVO(type, name, vm.getDataCenterId(),
owner.getDomainId(), owner.getId(), offering.getId(), size,
offering.getMinIops(), offering.getMaxIops(), null);
vol.setFormat(getSupportedImageFormatForCluster(template.getHypervisorType()));
if (vm != null) {
vol.setInstanceId(vm.getId());
}
vol.setTemplateId(template.getId());
        if (type.equals(Type.ROOT)) {
            vol.setDeviceId(0L);
            if (!vm.getType().equals(VirtualMachine.Type.User)) {
                vol.setRecreatable(true);
            }
        } else {
            vol.setDeviceId(1L);
        }
vol = _volsDao.persist(vol);
// Create event and update resource count for volumes if vm is a user vm
if (vm instanceof UserVm) {
Long offeringId = null;
if (offering.getType() == DiskOfferingVO.Type.Disk) {
offeringId = offering.getId();
}
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, vol.getAccountId(), vol.getDataCenterId(), vol.getId(), vol.getName(), offeringId, null, size,
Volume.class.getName(), vol.getUuid());
_resourceLimitMgr.incrementResourceCount(vm.getAccountId(),
ResourceType.volume);
_resourceLimitMgr.incrementResourceCount(vm.getAccountId(), ResourceType.primary_storage,
new Long(vol.getSize()));
}
return toDiskProfile(vol, offering);
}
private ImageFormat getSupportedImageFormatForCluster(HypervisorType hyperType) {
if (hyperType == HypervisorType.XenServer) {
return ImageFormat.VHD;
} else if (hyperType == HypervisorType.KVM) {
return ImageFormat.QCOW2;
} else if (hyperType == HypervisorType.VMware) {
return ImageFormat.OVA;
} else if (hyperType == HypervisorType.Ovm) {
return ImageFormat.RAW;
} else {
return null;
}
}
private VolumeInfo copyVolume(StoragePoolVO rootDiskPool
, VolumeInfo volume, VMInstanceVO vm, VMTemplateVO rootDiskTmplt, DataCenterVO dcVO,
HostPodVO pod, DiskOfferingVO diskVO, ServiceOfferingVO svo, HypervisorType rootDiskHyperType) throws NoTransitionException {
        if (!volume.getFormat().equals(
                getSupportedImageFormatForCluster(rootDiskHyperType))) {
            throw new InvalidParameterValueException(
                    "Failed to attach volume to VM since the volume's format "
                            + volume.getFormat().getFileExtension()
                            + " is not compatible with the VM's hypervisor type");
        }
VolumeInfo volumeOnPrimary = copyVolumeFromSecToPrimary(volume,
vm, rootDiskTmplt, dcVO, pod,
rootDiskPool.getClusterId(), svo, diskVO,
new ArrayList<StoragePool>(),
volume.getSize(), rootDiskHyperType);
return volumeOnPrimary;
}
private VolumeInfo createVolumeOnPrimaryStorage(VMInstanceVO vm, VolumeVO rootVolumeOfVm, VolumeInfo volume, HypervisorType rootDiskHyperType) throws NoTransitionException {
VMTemplateVO rootDiskTmplt = _templateDao.findById(vm
.getTemplateId());
DataCenterVO dcVO = _dcDao.findById(vm
.getDataCenterId());
HostPodVO pod = _podDao.findById(vm.getPodIdToDeployIn());
StoragePoolVO rootDiskPool = _storagePoolDao
.findById(rootVolumeOfVm.getPoolId());
ServiceOfferingVO svo = _serviceOfferingDao.findById(vm
.getServiceOfferingId());
DiskOfferingVO diskVO = _diskOfferingDao.findById(volume
.getDiskOfferingId());
Long clusterId = (rootDiskPool == null ? null : rootDiskPool
.getClusterId());
VolumeInfo vol = null;
if (volume.getState() == Volume.State.Allocated) {
vol = createVolume(volume, vm,
rootDiskTmplt, dcVO, pod, clusterId, svo, diskVO,
new ArrayList<StoragePool>(), volume.getSize(),
rootDiskHyperType);
} else if (volume.getState() == Volume.State.Uploaded) {
vol = copyVolume(rootDiskPool
, volume, vm, rootDiskTmplt, dcVO,
pod, diskVO, svo, rootDiskHyperType);
if (vol != null) {
// Moving of Volume is successful, decrement the volume resource count from secondary for an account and increment it into primary storage under same account.
_resourceLimitMgr.decrementResourceCount(volume.getAccountId(),
ResourceType.secondary_storage, new Long(volume.getSize()));
_resourceLimitMgr.incrementResourceCount(volume.getAccountId(),
ResourceType.primary_storage, new Long(volume.getSize()));
}
}
VolumeVO volVO = _volsDao.findById(vol.getId());
volVO.setFormat(getSupportedImageFormatForCluster(rootDiskHyperType));
_volsDao.update(volVO.getId(), volVO);
return volFactory.getVolume(volVO.getId());
}
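    /**
     * Decides whether a data volume must be moved before it can be attached, by
     * comparing the scopes of the primary data stores holding the VM's root volume
     * and the data volume.
     */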
private boolean needMoveVolume(VolumeVO rootVolumeOfVm, VolumeInfo volume) {
if (rootVolumeOfVm.getPoolId() == null || volume.getPoolId() == null) {
return false;
}
DataStore storeForRootVol = dataStoreMgr.getPrimaryDataStore(rootVolumeOfVm.getPoolId());
DataStore storeForDataVol = dataStoreMgr.getPrimaryDataStore(volume.getPoolId());
Scope storeForRootStoreScope = storeForRootVol.getScope();
if (storeForRootStoreScope == null) {
throw new CloudRuntimeException("Can't get scope of data store: " + storeForRootVol.getId());
}
Scope storeForDataStoreScope = storeForDataVol.getScope();
if (storeForDataStoreScope == null) {
throw new CloudRuntimeException("Can't get scope of data store: " + storeForDataVol.getId());
}
if (storeForDataStoreScope.getScopeType() == ScopeType.ZONE) {
return false;
}
if (storeForRootStoreScope.getScopeType() != storeForDataStoreScope.getScopeType()) {
if (storeForDataStoreScope.getScopeType() == ScopeType.CLUSTER && storeForRootStoreScope.getScopeType() == ScopeType.HOST) {
HostScope hs = (HostScope)storeForRootStoreScope;
if (storeForDataStoreScope.getScopeId().equals(hs.getClusterId())) {
return false;
}
}
if (storeForRootStoreScope.getScopeType() == ScopeType.CLUSTER && storeForDataStoreScope.getScopeType() == ScopeType.HOST) {
HostScope hs = (HostScope)storeForDataStoreScope;
if (storeForRootStoreScope.getScopeId().equals(hs.getClusterId())) {
return false;
}
}
throw new CloudRuntimeException("Can't move volume between scope: " + storeForDataStoreScope.getScopeType() + " and " + storeForRootStoreScope.getScopeType());
}
return !storeForRootStoreScope.isSameScope(storeForDataStoreScope);
}
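    /**
     * Sends the AttachCommand to the host (when the VM is running, or for VMware
     * when only a last host is known) and marks the volume as attached in the
     * database on success.
     */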
private VolumeVO sendAttachVolumeCommand(UserVmVO vm, VolumeVO volumeToAttach, Long deviceId) {
String errorMsg = "Failed to attach volume: " + volumeToAttach.getName()
+ " to VM: " + vm.getHostName();
boolean sendCommand = (vm.getState() == State.Running);
AttachAnswer answer = null;
Long hostId = vm.getHostId();
if (hostId == null) {
hostId = vm.getLastHostId();
HostVO host = _hostDao.findById(hostId);
if (host != null
&& host.getHypervisorType() == HypervisorType.VMware) {
sendCommand = true;
}
}
StoragePoolVO volumeToAttachStoragePool = null;
if (sendCommand) {
volumeToAttachStoragePool = _storagePoolDao.findById(volumeToAttach.getPoolId());
long storagePoolId = volumeToAttachStoragePool.getId();
DataTO volTO = volFactory.getVolume(volumeToAttach.getId()).getTO();
DiskTO disk = new DiskTO(volTO, deviceId, null, volumeToAttach.getVolumeType());
AttachCommand cmd = new AttachCommand(disk, vm.getInstanceName());
cmd.setManaged(volumeToAttachStoragePool.isManaged());
cmd.setStorageHost(volumeToAttachStoragePool.getHostAddress());
cmd.setStoragePort(volumeToAttachStoragePool.getPort());
cmd.set_iScsiName(volumeToAttach.get_iScsiName());
VolumeInfo volumeInfo = volFactory.getVolume(volumeToAttach.getId());
DataStore dataStore = dataStoreMgr.getDataStore(storagePoolId, DataStoreRole.Primary);
ChapInfo chapInfo = volService.getChapInfo(volumeInfo, dataStore);
if (chapInfo != null) {
cmd.setChapInitiatorUsername(chapInfo.getInitiatorUsername());
cmd.setChapInitiatorPassword(chapInfo.getInitiatorSecret());
cmd.setChapTargetUsername(chapInfo.getTargetUsername());
cmd.setChapTargetPassword(chapInfo.getTargetSecret());
}
try {
answer = (AttachAnswer)_agentMgr.send(hostId, cmd);
} catch (Exception e) {
throw new CloudRuntimeException(errorMsg + " due to: "
+ e.getMessage());
}
}
if (!sendCommand || (answer != null && answer.getResult())) {
// Mark the volume as attached
if (sendCommand) {
DiskTO disk = answer.getDisk();
_volsDao.attachVolume(volumeToAttach.getId(), vm.getId(),
disk.getDiskSeq());
volumeToAttach = _volsDao.findById(volumeToAttach.getId());
if (volumeToAttachStoragePool.isManaged() &&
volumeToAttach.getPath() == null) {
volumeToAttach.setPath(answer.getDisk().getVdiUuid());
_volsDao.update(volumeToAttach.getId(), volumeToAttach);
}
} else {
_volsDao.attachVolume(volumeToAttach.getId(), vm.getId(), deviceId);
}
// insert record for disk I/O statistics
VmDiskStatisticsVO diskstats = _vmDiskStatsDao.findBy(vm.getAccountId(), vm.getDataCenterId(),vm.getId(), volumeToAttach.getId());
if (diskstats == null) {
diskstats = new VmDiskStatisticsVO(vm.getAccountId(), vm.getDataCenterId(),vm.getId(), volumeToAttach.getId());
_vmDiskStatsDao.persist(diskstats);
}
return _volsDao.findById(volumeToAttach.getId());
} else {
if (answer != null) {
String details = answer.getDetails();
if (details != null && !details.isEmpty()) {
errorMsg += "; " + details;
}
}
throw new CloudRuntimeException(errorMsg);
}
}
private int getMaxDataVolumesSupported(UserVmVO vm) {
Long hostId = vm.getHostId();
if (hostId == null) {
hostId = vm.getLastHostId();
}
HostVO host = _hostDao.findById(hostId);
Integer maxDataVolumesSupported = null;
if (host != null) {
_hostDao.loadDetails(host);
maxDataVolumesSupported = _hypervisorCapabilitiesDao
.getMaxDataVolumesLimit(host.getHypervisorType(),
host.getDetail("product_version"));
}
if (maxDataVolumesSupported == null) {
maxDataVolumesSupported = 6; // 6 data disks by default if nothing
// is specified in
// 'hypervisor_capabilities' table
}
return maxDataVolumesSupported.intValue();
}
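    /**
     * Attaches a data volume to a user VM after validating volume and VM state,
     * zone, hypervisor, device id and the data disk limit; if needed, the volume is
     * first created on or moved to primary storage before the attach command is
     * sent.
     */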
@Override
@ActionEvent(eventType = EventTypes.EVENT_VOLUME_ATTACH, eventDescription = "attaching volume", async = true)
public Volume attachVolumeToVM(AttachVolumeCmd command) {
Long vmId = command.getVirtualMachineId();
Long volumeId = command.getId();
Long deviceId = command.getDeviceId();
Account caller = UserContext.current().getCaller();
// Check that the volume ID is valid
VolumeInfo volume = volFactory.getVolume(volumeId);
// Check that the volume is a data volume
if (volume == null || volume.getVolumeType() != Volume.Type.DATADISK) {
throw new InvalidParameterValueException(
"Please specify a valid data volume.");
}
// Check that the volume is not currently attached to any VM
if (volume.getInstanceId() != null) {
throw new InvalidParameterValueException(
"Please specify a volume that is not attached to any VM.");
}
// Check that the volume is not destroyed
if (volume.getState() == Volume.State.Destroy) {
throw new InvalidParameterValueException(
"Please specify a volume that is not destroyed.");
}
// Check that the virtual machine ID is valid and it's a user vm
UserVmVO vm = _userVmDao.findById(vmId);
if (vm == null || vm.getType() != VirtualMachine.Type.User) {
throw new InvalidParameterValueException(
"Please specify a valid User VM.");
}
// Check that the VM is in the correct state
if (vm.getState() != State.Running && vm.getState() != State.Stopped) {
throw new InvalidParameterValueException(
"Please specify a VM that is either running or stopped.");
}
// Check that the device ID is valid
if (deviceId != null) {
if (deviceId.longValue() == 0) {
throw new InvalidParameterValueException(
"deviceId can't be 0, which is used by Root device");
}
}
// Check that the number of data volumes attached to VM is less than
// that supported by hypervisor
List<VolumeVO> existingDataVolumes = _volsDao.findByInstanceAndType(
vmId, Volume.Type.DATADISK);
int maxDataVolumesSupported = getMaxDataVolumesSupported(vm);
if (existingDataVolumes.size() >= maxDataVolumesSupported) {
throw new InvalidParameterValueException(
"The specified VM already has the maximum number of data disks ("
+ maxDataVolumesSupported
+ "). Please specify another VM.");
}
// Check that the VM and the volume are in the same zone
if (vm.getDataCenterId() != volume.getDataCenterId()) {
throw new InvalidParameterValueException(
"Please specify a VM that is in the same zone as the volume.");
}
// If local storage is disabled then attaching a volume with local disk
// offering not allowed
DataCenterVO dataCenter = _dcDao.findById(volume.getDataCenterId());
if (!dataCenter.isLocalStorageEnabled()) {
DiskOfferingVO diskOffering = _diskOfferingDao.findById(volume
.getDiskOfferingId());
if (diskOffering.getUseLocalStorage()) {
throw new InvalidParameterValueException(
"Zone is not configured to use local storage but volume's disk offering "
+ diskOffering.getName() + " uses it");
}
}
// if target VM has associated VM snapshots
List<VMSnapshotVO> vmSnapshots = _vmSnapshotDao.findByVm(vmId);
if(vmSnapshots.size() > 0){
throw new InvalidParameterValueException(
"Unable to attach volume, please specify a VM that does not have VM snapshots");
}
// permission check
_accountMgr.checkAccess(caller, null, true, volume, vm);
if (!(Volume.State.Allocated.equals(volume.getState())
|| Volume.State.Ready.equals(volume.getState()) || Volume.State.Uploaded
.equals(volume.getState()))) {
throw new InvalidParameterValueException(
"Volume state must be in Allocated, Ready or in Uploaded state");
}
VolumeVO rootVolumeOfVm = null;
List<VolumeVO> rootVolumesOfVm = _volsDao.findByInstanceAndType(vmId,
Volume.Type.ROOT);
if (rootVolumesOfVm.size() != 1) {
throw new CloudRuntimeException(
"The VM "
+ vm.getHostName()
+ " has more than one ROOT volume and is in an invalid state.");
} else {
rootVolumeOfVm = rootVolumesOfVm.get(0);
}
HypervisorType rootDiskHyperType = vm.getHypervisorType();
HypervisorType dataDiskHyperType = _volsDao.getHypervisorType(volume
.getId());
if (dataDiskHyperType != HypervisorType.None
&& rootDiskHyperType != dataDiskHyperType) {
throw new InvalidParameterValueException(
"Can't attach a volume created by: " + dataDiskHyperType
+ " to a " + rootDiskHyperType + " vm");
}
deviceId = getDeviceId(vmId, deviceId);
VolumeInfo volumeOnPrimaryStorage = volume;
// Check if volume is stored on secondary storage
boolean isVolumeOnSec = false;
VolumeInfo volOnSecondary = volFactory.getVolume(volume.getId(), DataStoreRole.Image);
if (volOnSecondary != null) {
isVolumeOnSec = true;
if(volOnSecondary.getState() != Volume.State.Uploaded) {
throw new InvalidParameterValueException("Volume is not uploaded yet. Please try this operation once the volume is uploaded");
}
}
boolean createVolumeOnBackend = true;
if (rootVolumeOfVm.getState() == Volume.State.Allocated) {
createVolumeOnBackend = false;
if(isVolumeOnSec) {
throw new CloudRuntimeException("Cant attach uploaded volume to the vm which is not created. Please start it and then retry");
}
}
// Create volume on the backend only when VM's root volume is allocated
if (createVolumeOnBackend) {
if (volume.getState().equals(Volume.State.Allocated)
|| volume.getState() == Volume.State.Uploaded) {
try {
volumeOnPrimaryStorage = createVolumeOnPrimaryStorage(vm, rootVolumeOfVm, volume, rootDiskHyperType);
} catch (NoTransitionException e) {
s_logger.debug("Failed to create volume on primary storage", e);
throw new CloudRuntimeException("Failed to create volume on primary storage", e);
}
}
// reload the volume from db
volumeOnPrimaryStorage = volFactory.getVolume(volumeOnPrimaryStorage.getId());
boolean moveVolumeNeeded = needMoveVolume(rootVolumeOfVm, volumeOnPrimaryStorage);
if (moveVolumeNeeded) {
PrimaryDataStoreInfo primaryStore = (PrimaryDataStoreInfo)volumeOnPrimaryStorage.getDataStore();
if (primaryStore.isLocal()) {
throw new CloudRuntimeException(
"Failed to attach local data volume "
+ volume.getName()
+ " to VM "
+ vm.getDisplayName()
+ " as migration of local data volume is not allowed");
}
StoragePoolVO vmRootVolumePool = _storagePoolDao
.findById(rootVolumeOfVm.getPoolId());
try {
volumeOnPrimaryStorage = moveVolume(volumeOnPrimaryStorage,
vmRootVolumePool.getDataCenterId(),
vmRootVolumePool.getPodId(),
vmRootVolumePool.getClusterId(),
dataDiskHyperType);
} catch (ConcurrentOperationException e) {
s_logger.debug("move volume failed", e);
throw new CloudRuntimeException("move volume failed", e);
}
}
}
AsyncJobExecutor asyncExecutor = BaseAsyncJobExecutor
.getCurrentExecutor();
if (asyncExecutor != null) {
AsyncJobVO job = asyncExecutor.getJob();
if (s_logger.isInfoEnabled()) {
s_logger.info("Trying to attaching volume " + volumeId
+ " to vm instance:" + vm.getId()
+ ", update async job-" + job.getId() + " = [ " + job.getUuid()
+ " ] progress status");
}
_asyncMgr.updateAsyncJobAttachment(job.getId(), "volume", volumeId);
_asyncMgr.updateAsyncJobStatus(job.getId(),
BaseCmd.PROGRESS_INSTANCE_CREATED, volumeId);
}
VolumeVO newVol = _volumeDao.findById(volumeOnPrimaryStorage.getId());
newVol = sendAttachVolumeCommand(vm, newVol, deviceId);
return newVol;
}
@Override
public Volume updateVolume(UpdateVolumeCmd cmd){
Long volumeId = cmd.getId();
String path = cmd.getPath();
if(path == null){
throw new InvalidParameterValueException("Failed to update the volume as path was null");
}
VolumeVO volume = ApiDBUtils.findVolumeById(volumeId);
volume.setPath(path);
_volumeDao.update(volumeId, volume);
return volume;
}
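    /**
     * Detaches a data volume from its VM, sending a DettachCommand to the host when
     * the VM is running and marking the volume as detached on success.
     */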
@Override
@ActionEvent(eventType = EventTypes.EVENT_VOLUME_DETACH, eventDescription = "detaching volume", async = true)
public Volume detachVolumeFromVM(DetachVolumeCmd cmmd) {
Account caller = UserContext.current().getCaller();
if ((cmmd.getId() == null && cmmd.getDeviceId() == null && cmmd
.getVirtualMachineId() == null)
|| (cmmd.getId() != null && (cmmd.getDeviceId() != null || cmmd
.getVirtualMachineId() != null))
|| (cmmd.getId() == null && (cmmd.getDeviceId() == null || cmmd
.getVirtualMachineId() == null))) {
throw new InvalidParameterValueException(
"Please provide either a volume id, or a tuple(device id, instance id)");
}
Long volumeId = cmmd.getId();
VolumeVO volume = null;
if (volumeId != null) {
volume = _volsDao.findById(volumeId);
} else {
volume = _volsDao.findByInstanceAndDeviceId(
cmmd.getVirtualMachineId(), cmmd.getDeviceId()).get(0);
}
Long vmId = null;
if (cmmd.getVirtualMachineId() == null) {
vmId = volume.getInstanceId();
} else {
vmId = cmmd.getVirtualMachineId();
}
// Check that the volume ID is valid
if (volume == null) {
throw new InvalidParameterValueException(
"Unable to find volume with ID: " + volumeId);
}
// Permissions check
_accountMgr.checkAccess(caller, null, true, volume);
// Check that the volume is a data volume
if (volume.getVolumeType() != Volume.Type.DATADISK) {
throw new InvalidParameterValueException(
"Please specify a data volume.");
}
// Check that the volume is currently attached to a VM
if (vmId == null) {
throw new InvalidParameterValueException(
"The specified volume is not attached to a VM.");
}
// Check that the VM is in the correct state
UserVmVO vm = _userVmDao.findById(vmId);
if (vm.getState() != State.Running && vm.getState() != State.Stopped
&& vm.getState() != State.Destroyed) {
            throw new InvalidParameterValueException(
                    "Please specify a VM that is either running, stopped or destroyed.");
}
// Check if the VM has VM snapshots
List<VMSnapshotVO> vmSnapshots = _vmSnapshotDao.findByVm(vmId);
if(vmSnapshots.size() > 0){
throw new InvalidParameterValueException(
"Unable to detach volume, the specified volume is attached to a VM that has VM snapshots.");
}
AsyncJobExecutor asyncExecutor = BaseAsyncJobExecutor
.getCurrentExecutor();
if (asyncExecutor != null) {
AsyncJobVO job = asyncExecutor.getJob();
if (s_logger.isInfoEnabled()) {
s_logger.info("Trying to attaching volume " + volumeId
+ "to vm instance:" + vm.getId()
+ ", update async job-" + job.getId() + " = [ " + job.getUuid()
+ " ] progress status");
}
_asyncMgr.updateAsyncJobAttachment(job.getId(), "volume", volumeId);
_asyncMgr.updateAsyncJobStatus(job.getId(),
BaseCmd.PROGRESS_INSTANCE_CREATED, volumeId);
}
String errorMsg = "Failed to detach volume: " + volume.getName()
+ " from VM: " + vm.getHostName();
boolean sendCommand = (vm.getState() == State.Running);
Answer answer = null;
if (sendCommand) {
StoragePoolVO volumePool = _storagePoolDao.findById(volume.getPoolId());
DataTO volTO = volFactory.getVolume(volume.getId()).getTO();
DiskTO disk = new DiskTO(volTO, volume.getDeviceId(), null, volume.getVolumeType());
DettachCommand cmd = new DettachCommand(disk, vm.getInstanceName());
cmd.setManaged(volumePool.isManaged());
cmd.setStorageHost(volumePool.getHostAddress());
cmd.setStoragePort(volumePool.getPort());
cmd.set_iScsiName(volume.get_iScsiName());
try {
answer = _agentMgr.send(vm.getHostId(), cmd);
} catch (Exception e) {
throw new CloudRuntimeException(errorMsg + " due to: "
+ e.getMessage());
}
}
if (!sendCommand || (answer != null && answer.getResult())) {
// Mark the volume as detached
_volsDao.detachVolume(volume.getId());
return _volsDao.findById(volumeId);
} else {
if (answer != null) {
String details = answer.getDetails();
if (details != null && !details.isEmpty()) {
errorMsg += "; " + details;
}
}
throw new CloudRuntimeException(errorMsg);
}
}
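    /**
     * Allocates a duplicate of the existing volume (updating its template id if the
     * VM's template has changed) and requests destruction of the old volume; for
     * VMware the old volume is detached first so the VM is not removed during root
     * disk cleanup.
     */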
@DB
protected VolumeVO switchVolume(VolumeVO existingVolume,
VirtualMachineProfile<? extends VirtualMachine> vm)
throws StorageUnavailableException {
Transaction txn = Transaction.currentTxn();
Long templateIdToUse = null;
Long volTemplateId = existingVolume.getTemplateId();
long vmTemplateId = vm.getTemplateId();
if (volTemplateId != null && volTemplateId.longValue() != vmTemplateId) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("switchVolume: Old Volume's templateId: "
+ volTemplateId
+ " does not match the VM's templateId: "
+ vmTemplateId
+ ", updating templateId in the new Volume");
}
templateIdToUse = vmTemplateId;
}
txn.start();
VolumeVO newVolume = allocateDuplicateVolume(existingVolume,
templateIdToUse);
        // In case of VMware, if the vm reference is not removed then during root
        // disk cleanup the vm also gets deleted, so remove the reference
if (vm.getHypervisorType() == HypervisorType.VMware) {
_volsDao.detachVolume(existingVolume.getId());
}
try {
stateTransitTo(existingVolume, Volume.Event.DestroyRequested);
} catch (NoTransitionException e) {
s_logger.debug("Unable to destroy existing volume: " + e.toString());
}
txn.commit();
return newVolume;
}
@Override
public void release(VirtualMachineProfile<? extends VMInstanceVO> profile) {
// add code here
}
@Override
@DB
public void cleanupVolumes(long vmId) throws ConcurrentOperationException {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Cleaning storage for vm: " + vmId);
}
List<VolumeVO> volumesForVm = _volsDao.findByInstance(vmId);
List<VolumeVO> toBeExpunged = new ArrayList<VolumeVO>();
Transaction txn = Transaction.currentTxn();
txn.start();
for (VolumeVO vol : volumesForVm) {
if (vol.getVolumeType().equals(Type.ROOT)) {
// Destroy volume if not already destroyed
boolean volumeAlreadyDestroyed = (vol.getState() == Volume.State.Destroy ||
vol.getState() == Volume.State.Expunged ||
vol.getState() == Volume.State.Expunging);
if (!volumeAlreadyDestroyed) {
volService.destroyVolume(vol.getId());
} else {
s_logger.debug("Skipping destroy for the volume " + vol + " as its in state " + vol.getState().toString());
}
toBeExpunged.add(vol);
} else {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Detaching " + vol);
}
_volsDao.detachVolume(vol.getId());
}
}
txn.commit();
AsyncCallFuture<VolumeApiResult> future = null;
for (VolumeVO expunge : toBeExpunged) {
future = volService.expungeVolumeAsync(volFactory.getVolume(expunge.getId()));
try {
future.get();
            } catch (InterruptedException e) {
                s_logger.debug("Failed to expunge volume " + expunge.getId(), e);
            } catch (ExecutionException e) {
                s_logger.debug("Failed to expunge volume " + expunge.getId(), e);
            }
}
}
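    /**
     * Migrates a volume to the given storage pool; for a volume attached to a
     * running VM on a hypervisor that supports storage motion, the livemigrate
     * parameter must be set and a live migration is performed instead of a copy.
     */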
@DB
@Override
public Volume migrateVolume(MigrateVolumeCmd cmd) {
Long volumeId = cmd.getVolumeId();
Long storagePoolId = cmd.getStoragePoolId();
VolumeVO vol = _volsDao.findById(volumeId);
if (vol == null) {
throw new InvalidParameterValueException(
"Failed to find the volume id: " + volumeId);
}
if (vol.getState() != Volume.State.Ready) {
throw new InvalidParameterValueException(
"Volume must be in ready state");
}
        if (storagePoolId.equals(vol.getPoolId())) {
            throw new InvalidParameterValueException("Specified destination pool and the current volume storage pool are the same");
}
boolean liveMigrateVolume = false;
Long instanceId = vol.getInstanceId();
VMInstanceVO vm = null;
if (instanceId != null) {
vm = _vmInstanceDao.findById(instanceId);
}
if (vm != null && vm.getState() == State.Running) {
// Check if the underlying hypervisor supports storage motion.
Long hostId = vm.getHostId();
if (hostId != null) {
HostVO host = _hostDao.findById(hostId);
HypervisorCapabilitiesVO capabilities = null;
if (host != null) {
capabilities = _hypervisorCapabilitiesDao.findByHypervisorTypeAndVersion(host.getHypervisorType(),
host.getHypervisorVersion());
}
if (capabilities != null) {
liveMigrateVolume = capabilities.isStorageMotionSupported();
}
}
}
if (liveMigrateVolume && !cmd.isLiveMigrate()) {
throw new InvalidParameterValueException("The volume " + vol + "is attached to a vm and for migrating it " +
"the parameter livemigrate should be specified");
}
StoragePool destPool = (StoragePool)dataStoreMgr.getDataStore(storagePoolId, DataStoreRole.Primary);
if (destPool == null) {
throw new InvalidParameterValueException(
"Failed to find the destination storage pool: "
+ storagePoolId);
}
if (!volumeOnSharedStoragePool(vol)) {
throw new InvalidParameterValueException(
"Migration of volume from local storage pool is not supported");
}
Volume newVol = null;
if (liveMigrateVolume) {
newVol = liveMigrateVolume(vol, destPool);
} else {
newVol = migrateVolume(vol, destPool);
}
return newVol;
}
@DB
protected Volume migrateVolume(Volume volume, StoragePool destPool) {
VolumeInfo vol = volFactory.getVolume(volume.getId());
AsyncCallFuture<VolumeApiResult> future = volService.copyVolume(vol, (DataStore)destPool);
try {
VolumeApiResult result = future.get();
if (result.isFailed()) {
s_logger.error("migrate volume failed:" + result.getResult());
return null;
}
return result.getVolume();
} catch (InterruptedException e) {
s_logger.debug("migrate volume failed", e);
return null;
} catch (ExecutionException e) {
s_logger.debug("migrate volume failed", e);
return null;
}
}
@DB
protected Volume liveMigrateVolume(Volume volume, StoragePool destPool) {
VolumeInfo vol = volFactory.getVolume(volume.getId());
AsyncCallFuture<VolumeApiResult> future = volService.migrateVolume(vol, (DataStore)destPool);
try {
VolumeApiResult result = future.get();
if (result.isFailed()) {
s_logger.debug("migrate volume failed:" + result.getResult());
return null;
}
return result.getVolume();
} catch (InterruptedException e) {
s_logger.debug("migrate volume failed", e);
return null;
} catch (ExecutionException e) {
s_logger.debug("migrate volume failed", e);
return null;
}
}
@Override
public <T extends VMInstanceVO> void migrateVolumes(T vm, VirtualMachineTO vmTo, Host srcHost, Host destHost,
Map<VolumeVO, StoragePoolVO> volumeToPool) {
        // Check if all the volumes being migrated belong to the vm.
// Check if the storage pool is of the right type.
// Create a VolumeInfo to DataStore map too.
Map<VolumeInfo, DataStore> volumeMap = new HashMap<VolumeInfo, DataStore>();
for (Map.Entry<VolumeVO, StoragePoolVO> entry : volumeToPool.entrySet()) {
VolumeVO volume = entry.getKey();
StoragePoolVO storagePool = entry.getValue();
StoragePool destPool = (StoragePool)dataStoreMgr.getDataStore(storagePool.getId(),
DataStoreRole.Primary);
if (volume.getInstanceId() != vm.getId()) {
throw new CloudRuntimeException("Volume " + volume + " that has to be migrated doesn't belong to the" +
" instance " + vm);
}
if (destPool == null) {
throw new CloudRuntimeException("Failed to find the destination storage pool " + storagePool.getId());
}
volumeMap.put(volFactory.getVolume(volume.getId()), (DataStore)destPool);
}
AsyncCallFuture<CommandResult> future = volService.migrateVolumes(volumeMap, vmTo, srcHost, destHost);
try {
CommandResult result = future.get();
if (result.isFailed()) {
s_logger.debug("Failed to migrated vm " + vm + " along with its volumes. " + result.getResult());
throw new CloudRuntimeException("Failed to migrated vm " + vm + " along with its volumes. " +
result.getResult());
}
} catch (InterruptedException e) {
s_logger.debug("Failed to migrated vm " + vm + " along with its volumes.", e);
} catch (ExecutionException e) {
s_logger.debug("Failed to migrated vm " + vm + " along with its volumes.", e);
}
}
@Override
public boolean storageMigration(
VirtualMachineProfile<? extends VirtualMachine> vm,
StoragePool destPool) {
List<VolumeVO> vols = _volsDao.findUsableVolumesForInstance(vm.getId());
List<Volume> volumesNeedToMigrate = new ArrayList<Volume>();
for (VolumeVO volume : vols) {
if (volume.getState() != Volume.State.Ready) {
s_logger.debug("volume: " + volume.getId() + " is in "
+ volume.getState() + " state");
throw new CloudRuntimeException("volume: " + volume.getId()
+ " is in " + volume.getState() + " state");
}
if (volume.getPoolId() == destPool.getId()) {
s_logger.debug("volume: " + volume.getId()
+ " is on the same storage pool: " + destPool.getId());
continue;
}
volumesNeedToMigrate.add(volume);
}
if (volumesNeedToMigrate.isEmpty()) {
s_logger.debug("No volume need to be migrated");
return true;
}
for (Volume vol : volumesNeedToMigrate) {
Volume result = migrateVolume(vol, destPool);
if (result == null) {
return false;
}
}
return true;
}
@Override
public void prepareForMigration(
VirtualMachineProfile<? extends VirtualMachine> vm,
DeployDestination dest) {
List<VolumeVO> vols = _volsDao.findUsableVolumesForInstance(vm.getId());
if (s_logger.isDebugEnabled()) {
s_logger.debug("Preparing " + vols.size() + " volumes for " + vm);
}
for (VolumeVO vol : vols) {
DataTO volTO = volFactory.getVolume(vol.getId()).getTO();
DiskTO disk = new DiskTO(volTO, vol.getDeviceId(), null, vol.getVolumeType());
vm.addDisk(disk);
}
if (vm.getType() == VirtualMachine.Type.User && vm.getTemplate().getFormat() == ImageFormat.ISO) {
DataTO dataTO = tmplFactory.getTemplate(vm.getTemplate().getId(), DataStoreRole.Image, vm.getVirtualMachine().getDataCenterId()).getTO();
DiskTO iso = new DiskTO(dataTO, 3L, null, Volume.Type.ISO);
vm.addDisk(iso);
}
}
    private enum VolumeTaskType {
        RECREATE,
        NOP,
        MIGRATE
    }
private static class VolumeTask {
final VolumeTaskType type;
final StoragePoolVO pool;
final VolumeVO volume;
VolumeTask(VolumeTaskType type, VolumeVO volume, StoragePoolVO pool) {
this.type = type;
this.pool = pool;
this.volume = volume;
}
}
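    /**
     * Builds the per-volume task list (RECREATE, MIGRATE or NOP) for a VM
     * deployment, based on the deployment planner's pool assignments and the
     * recreate-system-VM setting.
     */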
private List<VolumeTask> getTasks(List<VolumeVO> vols, Map<Volume, StoragePool> destVols) throws StorageUnavailableException {
boolean recreate = _recreateSystemVmEnabled;
List<VolumeTask> tasks = new ArrayList<VolumeTask>();
for (VolumeVO vol : vols) {
StoragePoolVO assignedPool = null;
if (destVols != null) {
StoragePool pool = destVols.get(vol);
if (pool != null) {
assignedPool = _storagePoolDao.findById(pool.getId());
}
}
if (assignedPool == null && recreate) {
assignedPool = _storagePoolDao.findById(vol.getPoolId());
}
if (assignedPool != null || recreate) {
Volume.State state = vol.getState();
if (state == Volume.State.Allocated
|| state == Volume.State.Creating) {
VolumeTask task = new VolumeTask(VolumeTaskType.RECREATE, vol, null);
tasks.add(task);
} else {
if (vol.isRecreatable()) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Volume " + vol
+ " will be recreated on storage pool "
+ assignedPool
+ " assigned by deploymentPlanner");
}
VolumeTask task = new VolumeTask(VolumeTaskType.RECREATE, vol, null);
tasks.add(task);
} else {
if (assignedPool.getId() != vol.getPoolId()) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Mismatch in storage pool "
+ assignedPool
+ " assigned by deploymentPlanner and the one associated with volume "
+ vol);
}
DiskOfferingVO diskOffering = _diskOfferingDao
.findById(vol.getDiskOfferingId());
if (diskOffering.getUseLocalStorage()) {
// Currently migration of local volume is not supported so bail out
if (s_logger.isDebugEnabled()) {
s_logger.debug("Local volume "
+ vol
+ " cannot be recreated on storagepool "
+ assignedPool
+ " assigned by deploymentPlanner");
}
throw new CloudRuntimeException("Local volume " + vol + " cannot be recreated on storagepool " + assignedPool + " assigned by deploymentPlanner");
} else {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Shared volume "
+ vol
+ " will be migrated on storage pool "
+ assignedPool
+ " assigned by deploymentPlanner");
}
VolumeTask task = new VolumeTask(VolumeTaskType.MIGRATE, vol, assignedPool);
tasks.add(task);
}
} else {
StoragePoolVO pool = _storagePoolDao
.findById(vol.getPoolId());
VolumeTask task = new VolumeTask(VolumeTaskType.NOP, vol, pool);
tasks.add(task);
}
}
}
} else {
if (vol.getPoolId() == null) {
                    throw new StorageUnavailableException(
                            "Volume has no pool associated and no storage pool assigned in DeployDestination, unable to create "
                                    + vol, Volume.class, vol.getId());
}
if (s_logger.isDebugEnabled()) {
s_logger.debug("No need to recreate the volume: " + vol
+ ", since it already has a pool assigned: "
+ vol.getPoolId() + ", adding disk to VM");
}
StoragePoolVO pool = _storagePoolDao.findById(vol
.getPoolId());
VolumeTask task = new VolumeTask(VolumeTaskType.NOP, vol, pool);
tasks.add(task);
}
}
return tasks;
}
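    /**
     * Recreates a volume on the destination primary store: allocated/creating
     * volumes are created directly, otherwise a fresh volume record is switched in
     * for the old one and created from scratch or from its template.
     */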
private Pair<VolumeVO, DataStore> recreateVolume(VolumeVO vol, VirtualMachineProfile<? extends VirtualMachine> vm,
DeployDestination dest) throws StorageUnavailableException {
VolumeVO newVol;
boolean recreate = _recreateSystemVmEnabled;
DataStore destPool = null;
if (recreate
&& (dest.getStorageForDisks() == null || dest
.getStorageForDisks().get(vol) == null)) {
destPool = dataStoreMgr.getDataStore(vol.getPoolId(), DataStoreRole.Primary);
s_logger.debug("existing pool: " + destPool.getId());
} else {
StoragePool pool = dest.getStorageForDisks().get(vol);
destPool = dataStoreMgr.getDataStore(pool.getId(), DataStoreRole.Primary);
}
if (vol.getState() == Volume.State.Allocated
|| vol.getState() == Volume.State.Creating) {
newVol = vol;
} else {
newVol = switchVolume(vol, vm);
// update the volume->PrimaryDataStoreVO map since volumeId has
// changed
if (dest.getStorageForDisks() != null
&& dest.getStorageForDisks().containsKey(vol)) {
StoragePool poolWithOldVol = dest
.getStorageForDisks().get(vol);
dest.getStorageForDisks().put(newVol, poolWithOldVol);
dest.getStorageForDisks().remove(vol);
}
if (s_logger.isDebugEnabled()) {
s_logger.debug("Created new volume " + newVol
+ " for old volume " + vol);
}
}
VolumeInfo volume = volFactory.getVolume(newVol.getId(), destPool);
Long templateId = newVol.getTemplateId();
AsyncCallFuture<VolumeApiResult> future = null;
if (templateId == null) {
future = volService.createVolumeAsync(volume, destPool);
} else {
TemplateInfo templ = tmplFactory.getTemplate(templateId, DataStoreRole.Image);
future = volService.createVolumeFromTemplateAsync(volume, destPool.getId(), templ);
}
VolumeApiResult result = null;
try {
result = future.get();
if (result.isFailed()) {
s_logger.debug("Unable to create "
+ newVol + ":" + result.getResult());
throw new StorageUnavailableException("Unable to create "
+ newVol + ":" + result.getResult(), destPool.getId());
}
newVol = _volsDao.findById(newVol.getId());
} catch (InterruptedException e) {
s_logger.error("Unable to create " + newVol, e);
throw new StorageUnavailableException("Unable to create "
+ newVol + ":" + e.toString(), destPool.getId());
} catch (ExecutionException e) {
s_logger.error("Unable to create " + newVol, e);
throw new StorageUnavailableException("Unable to create "
+ newVol + ":" + e.toString(), destPool.getId());
}
return new Pair<VolumeVO, DataStore>(newVol, destPool);
}
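    /**
     * Prepares all volumes of the VM for the given deploy destination by executing
     * the tasks computed in getTasks() and attaching the resulting disks to the VM
     * profile.
     */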
@Override
public void prepare(VirtualMachineProfile<? extends VirtualMachine> vm,
DeployDestination dest) throws StorageUnavailableException,
InsufficientStorageCapacityException, ConcurrentOperationException {
if (dest == null) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("DeployDestination cannot be null, cannot prepare Volumes for the vm: "
+ vm);
}
throw new CloudRuntimeException(
"Unable to prepare Volume for vm because DeployDestination is null, vm:"
+ vm);
}
List<VolumeVO> vols = _volsDao.findUsableVolumesForInstance(vm.getId());
if (s_logger.isDebugEnabled()) {
s_logger.debug("Checking if we need to prepare " + vols.size()
+ " volumes for " + vm);
}
List<VolumeTask> tasks = getTasks(vols, dest.getStorageForDisks());
Volume vol = null;
StoragePool pool = null;
for (VolumeTask task : tasks) {
if (task.type == VolumeTaskType.NOP) {
pool = (StoragePool)dataStoreMgr.getDataStore(task.pool.getId(), DataStoreRole.Primary);
vol = task.volume;
} else if (task.type == VolumeTaskType.MIGRATE) {
pool = (StoragePool)dataStoreMgr.getDataStore(task.pool.getId(), DataStoreRole.Primary);
migrateVolume(task.volume, pool);
vol = task.volume;
} else if (task.type == VolumeTaskType.RECREATE) {
Pair<VolumeVO, DataStore> result = recreateVolume(task.volume, vm, dest);
pool = (StoragePool)dataStoreMgr.getDataStore(result.second().getId(), DataStoreRole.Primary);
vol = result.first();
}
DataTO volumeTO = volFactory.getVolume(vol.getId()).getTO();
DiskTO disk = new DiskTO(volumeTO, vol.getDeviceId(), null, vol.getVolumeType());
vm.addDisk(disk);
}
}
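    /**
     * Validates a requested device id (must be 1, 2 or 4-15 and not already in use)
     * or allocates the lowest free id, skipping 3 which is reserved for the ISO.
     */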
private Long getDeviceId(long vmId, Long deviceId) {
// allocate deviceId
List<VolumeVO> vols = _volsDao.findByInstance(vmId);
if (deviceId != null) {
if (deviceId.longValue() > 15 || deviceId.longValue() == 0
|| deviceId.longValue() == 3) {
throw new RuntimeException("deviceId should be 1,2,4-15");
}
for (VolumeVO vol : vols) {
if (vol.getDeviceId().equals(deviceId)) {
throw new RuntimeException("deviceId " + deviceId
+ " is used by vm" + vmId);
}
}
} else {
// allocate deviceId here
List<String> devIds = new ArrayList<String>();
for (int i = 1; i < 15; i++) {
devIds.add(String.valueOf(i));
}
devIds.remove("3");
for (VolumeVO vol : vols) {
devIds.remove(vol.getDeviceId().toString().trim());
}
deviceId = Long.parseLong(devIds.iterator().next());
}
return deviceId;
}
private boolean stateTransitTo(Volume vol, Volume.Event event)
throws NoTransitionException {
return _volStateMachine.transitTo(vol, event, null, _volsDao);
}
@Override
public boolean canVmRestartOnAnotherServer(long vmId) {
List<VolumeVO> vols = _volsDao.findCreatedByInstance(vmId);
for (VolumeVO vol : vols) {
if (!vol.isRecreatable() && !vol.getPoolType().isShared()) {
return false;
}
}
return true;
}
@Override
public boolean configure(String name, Map<String, Object> params)
throws ConfigurationException {
String _customDiskOfferingMinSizeStr = _configDao
.getValue(Config.CustomDiskOfferingMinSize.toString());
_customDiskOfferingMinSize = NumbersUtil.parseInt(
_customDiskOfferingMinSizeStr, Integer
.parseInt(Config.CustomDiskOfferingMinSize
.getDefaultValue()));
String maxVolumeSizeInGbString = _configDao
.getValue("storage.max.volume.size");
_maxVolumeSizeInGb = NumbersUtil.parseLong(maxVolumeSizeInGbString,
2000);
        String value = _configDao.getValue(Config.RecreateSystemVmEnabled.key());
        _recreateSystemVmEnabled = Boolean.parseBoolean(value);
        String copyVolumeWait = _configDao.getValue(Config.CopyVolumeWait.toString());
        _copyvolumewait = NumbersUtil.parseInt(copyVolumeWait,
                Integer.parseInt(Config.CopyVolumeWait.getDefaultValue()));
return true;
}
@Override
public boolean start() {
return true;
}
@Override
public boolean stop() {
return true;
}
@Override
public String getName() {
return "Volume Manager";
}
@Override
public void destroyVolume(VolumeVO volume) {
try {
volService.destroyVolume(volume.getId());
} catch (ConcurrentOperationException e) {
s_logger.debug("Failed to destroy volume" + volume.getId(), e);
throw new CloudRuntimeException("Failed to destroy volume" + volume.getId(), e);
}
}
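    /**
     * Destroys the given volume through the volume service, wrapping concurrent
     * operation failures in a CloudRuntimeException.
     */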
@Override
public Snapshot takeSnapshot(Long volumeId, Long policyId, Long snapshotId, Account account) throws ResourceAllocationException {
VolumeInfo volume = volFactory.getVolume(volumeId);
if (volume == null) {
throw new InvalidParameterValueException("Creating snapshot failed due to volume:" + volumeId + " doesn't exist");
}
if (volume.getState() != Volume.State.Ready) {
throw new InvalidParameterValueException("VolumeId: " + volumeId + " is not in " + Volume.State.Ready + " state but " + volume.getState() + ". Cannot take snapshot.");
}
CreateSnapshotPayload payload = new CreateSnapshotPayload();
payload.setSnapshotId(snapshotId);
payload.setSnapshotPolicyId(policyId);
payload.setAccount(account);
volume.addPayload(payload);
return volService.takeSnapshot(volume);
}
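    /**
     * Takes a snapshot of a Ready volume using the previously allocated snapshot
     * record, policy and account carried in the snapshot payload.
     */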
@Override
public Snapshot allocSnapshot(Long volumeId, Long policyId) throws ResourceAllocationException {
Account caller = UserContext.current().getCaller();
VolumeInfo volume = volFactory.getVolume(volumeId);
if (volume == null) {
throw new InvalidParameterValueException("Creating snapshot failed due to volume:" + volumeId + " doesn't exist");
}
DataCenter zone = _dcDao.findById(volume.getDataCenterId());
if (zone == null) {
throw new InvalidParameterValueException("Can't find zone by id " + volume.getDataCenterId());
}
if (Grouping.AllocationState.Disabled == zone.getAllocationState() && !_accountMgr.isRootAdmin(caller.getType())) {
throw new PermissionDeniedException("Cannot perform this operation, Zone is currently disabled: " + zone.getName());
}
if (volume.getState() != Volume.State.Ready) {
throw new InvalidParameterValueException("VolumeId: " + volumeId + " is not in " + Volume.State.Ready + " state but " + volume.getState() + ". Cannot take snapshot.");
}
if ( volume.getTemplateId() != null ) {
VMTemplateVO template = _templateDao.findById(volume.getTemplateId());
if( template != null && template.getTemplateType() == Storage.TemplateType.SYSTEM ) {
throw new InvalidParameterValueException("VolumeId: " + volumeId + " is for System VM , Creating snapshot against System VM volumes is not supported");
}
}
StoragePool storagePool = (StoragePool)volume.getDataStore();
if (storagePool == null) {
throw new InvalidParameterValueException("VolumeId: " + volumeId + " please attach this volume to a VM before create snapshot for it");
}
return snapshotMgr.allocSnapshot(volumeId, policyId);
}
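    /**
     * Validates that a snapshot may be taken of the volume (zone enabled, volume
     * Ready, not a system VM volume, attached to a storage pool) and allocates the
     * snapshot record.
     */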
@Override
@ActionEvent(eventType = EventTypes.EVENT_VOLUME_EXTRACT, eventDescription = "extracting volume", async = true)
public String extractVolume(ExtractVolumeCmd cmd) {
Long volumeId = cmd.getId();
Long zoneId = cmd.getZoneId();
String mode = cmd.getMode();
Account account = UserContext.current().getCaller();
if (!_accountMgr.isRootAdmin(account.getType()) && ApiDBUtils.isExtractionDisabled()) {
throw new PermissionDeniedException("Extraction has been disabled by admin");
}
VolumeVO volume = _volumeDao.findById(volumeId);
if (volume == null) {
InvalidParameterValueException ex = new InvalidParameterValueException("Unable to find volume with specified volumeId");
ex.addProxyObject(volumeId.toString(), "volumeId");
throw ex;
}
// perform permission check
_accountMgr.checkAccess(account, null, true, volume);
if (_dcDao.findById(zoneId) == null) {
throw new InvalidParameterValueException("Please specify a valid zone.");
}
if (volume.getPoolId() == null) {
throw new InvalidParameterValueException("The volume doesnt belong to a storage pool so cant extract it");
}
// Extract activity only for detached volumes or for volumes whose
// instance is stopped
if (volume.getInstanceId() != null && ApiDBUtils.findVMInstanceById(volume.getInstanceId()).getState() != State.Stopped) {
s_logger.debug("Invalid state of the volume with ID: " + volumeId
+ ". It should be either detached or the VM should be in stopped state.");
PermissionDeniedException ex = new PermissionDeniedException(
"Invalid state of the volume with specified ID. It should be either detached or the VM should be in stopped state.");
ex.addProxyObject(volume.getUuid(), "volumeId");
throw ex;
}
if (volume.getVolumeType() != Volume.Type.DATADISK) {
            // Data disks don't have any template dependency.
VMTemplateVO template = ApiDBUtils.findTemplateById(volume.getTemplateId());
if (template != null) { // For ISO based volumes template = null and
// we allow extraction of all ISO based
// volumes
boolean isExtractable = template.isExtractable() && template.getTemplateType() != Storage.TemplateType.SYSTEM;
if (!isExtractable && account != null && account.getType() != Account.ACCOUNT_TYPE_ADMIN) {
// Global admins are always allowed to extract
PermissionDeniedException ex = new PermissionDeniedException("The volume with specified volumeId is not allowed to be extracted");
ex.addProxyObject(volume.getUuid(), "volumeId");
throw ex;
}
}
}
Upload.Mode extractMode;
if (mode == null || (!mode.equals(Upload.Mode.FTP_UPLOAD.toString()) && !mode.equals(Upload.Mode.HTTP_DOWNLOAD.toString()))) {
throw new InvalidParameterValueException("Please specify a valid extract Mode ");
} else {
extractMode = mode.equals(Upload.Mode.FTP_UPLOAD.toString()) ? Upload.Mode.FTP_UPLOAD : Upload.Mode.HTTP_DOWNLOAD;
}
// Check if the url already exists
VolumeDataStoreVO volumeStoreRef = _volumeStoreDao.findByVolume(volumeId);
if(volumeStoreRef != null && volumeStoreRef.getExtractUrl() != null){
return volumeStoreRef.getExtractUrl();
}
        // Cleaned up to remove the previous uploadVO and uploadMonitor code. The old code tried to fake an async operation purely in
        // the db tables with uploadVO and async_job entries, but the internal implementation is actually synchronous.
ImageStoreEntity secStore = (ImageStoreEntity) dataStoreMgr.getImageStore(zoneId);
// Copy volume from primary to secondary storage
VolumeInfo srcVol = volFactory.getVolume(volume.getId());
AsyncCallFuture<VolumeApiResult> cvAnswer = volService.copyVolume(srcVol, secStore);
// Check if you got a valid answer.
VolumeApiResult cvResult = null;
try {
cvResult = cvAnswer.get();
} catch (InterruptedException e1) {
s_logger.debug("failed copy volume", e1);
throw new CloudRuntimeException("Failed to copy volume", e1);
} catch (ExecutionException e1) {
s_logger.debug("failed copy volume", e1);
throw new CloudRuntimeException("Failed to copy volume", e1);
}
if (cvResult == null || cvResult.isFailed()) {
String errorString = "Failed to copy the volume from the source primary storage pool to secondary storage.";
throw new CloudRuntimeException(errorString);
}
VolumeInfo vol = cvResult.getVolume();
String extractUrl = secStore.createEntityExtractUrl(vol.getPath(), vol.getFormat(), vol);
volumeStoreRef = _volumeStoreDao.findByVolume(volumeId);
volumeStoreRef.setExtractUrl(extractUrl);
volumeStoreRef.setExtractUrlCreated(DateUtil.now());
_volumeStoreDao.update(volumeStoreRef.getId(), volumeStoreRef);
return extractUrl;
}
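    /**
     * Copies a detached (or stopped-VM) volume to secondary storage and returns a
     * download URL for it, reusing an existing extract URL if one was already
     * created.
     */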
private String getFormatForPool(StoragePool pool) {
ClusterVO cluster = ApiDBUtils.findClusterById(pool.getClusterId());
if (cluster.getHypervisorType() == HypervisorType.XenServer) {
return "vhd";
} else if (cluster.getHypervisorType() == HypervisorType.KVM) {
return "qcow2";
} else if (cluster.getHypervisorType() == HypervisorType.VMware) {
return "ova";
} else if (cluster.getHypervisorType() == HypervisorType.Ovm) {
return "raw";
} else {
return null;
}
}
@Override
public String getVmNameFromVolumeId(long volumeId) {
VolumeVO volume = _volsDao.findById(volumeId);
return getVmNameOnVolume(volume);
}
@Override
public String getStoragePoolOfVolume(long volumeId) {
VolumeVO vol = _volsDao.findById(volumeId);
return dataStoreMgr.getPrimaryDataStore(vol.getPoolId()).getUuid();
}
}
| server/src/com/cloud/storage/VolumeManagerImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.cloud.storage;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import javax.inject.Inject;
import javax.naming.ConfigurationException;
import com.cloud.utils.DateUtil;
import com.cloud.utils.EnumUtils;
import com.cloud.utils.NumbersUtil;
import com.cloud.utils.Pair;
import com.cloud.utils.UriUtils;
import org.apache.log4j.Logger;
import org.springframework.stereotype.Component;
import org.apache.cloudstack.api.BaseCmd;
import org.apache.cloudstack.api.command.user.volume.AttachVolumeCmd;
import org.apache.cloudstack.api.command.user.volume.CreateVolumeCmd;
import org.apache.cloudstack.api.command.user.volume.DetachVolumeCmd;
import org.apache.cloudstack.api.command.user.volume.ExtractVolumeCmd;
import org.apache.cloudstack.api.command.user.volume.MigrateVolumeCmd;
import org.apache.cloudstack.api.command.user.volume.ResizeVolumeCmd;
import org.apache.cloudstack.api.command.user.volume.UpdateVolumeCmd;
import org.apache.cloudstack.api.command.user.volume.UploadVolumeCmd;
import org.apache.cloudstack.engine.subsystem.api.storage.ChapInfo;
import org.apache.cloudstack.engine.subsystem.api.storage.DataStore;
import org.apache.cloudstack.engine.subsystem.api.storage.DataStoreManager;
import org.apache.cloudstack.engine.subsystem.api.storage.DataStoreProviderManager;
import org.apache.cloudstack.engine.subsystem.api.storage.HostScope;
import org.apache.cloudstack.engine.subsystem.api.storage.PrimaryDataStoreInfo;
import org.apache.cloudstack.engine.subsystem.api.storage.Scope;
import org.apache.cloudstack.engine.subsystem.api.storage.SnapshotDataFactory;
import org.apache.cloudstack.engine.subsystem.api.storage.SnapshotInfo;
import org.apache.cloudstack.engine.subsystem.api.storage.StoragePoolAllocator;
import org.apache.cloudstack.engine.subsystem.api.storage.TemplateDataFactory;
import org.apache.cloudstack.engine.subsystem.api.storage.TemplateInfo;
import org.apache.cloudstack.engine.subsystem.api.storage.VolumeDataFactory;
import org.apache.cloudstack.engine.subsystem.api.storage.VolumeInfo;
import org.apache.cloudstack.engine.subsystem.api.storage.VolumeService;
import org.apache.cloudstack.engine.subsystem.api.storage.VolumeService.VolumeApiResult;
import org.apache.cloudstack.framework.async.AsyncCallFuture;
import org.apache.cloudstack.storage.command.AttachAnswer;
import org.apache.cloudstack.storage.command.AttachCommand;
import org.apache.cloudstack.storage.command.CommandResult;
import org.apache.cloudstack.storage.command.DettachCommand;
import org.apache.cloudstack.storage.datastore.db.PrimaryDataStoreDao;
import org.apache.cloudstack.storage.datastore.db.StoragePoolDetailsDao;
import org.apache.cloudstack.storage.datastore.db.StoragePoolVO;
import org.apache.cloudstack.storage.datastore.db.TemplateDataStoreDao;
import org.apache.cloudstack.storage.datastore.db.TemplateDataStoreVO;
import org.apache.cloudstack.storage.datastore.db.VolumeDataStoreDao;
import org.apache.cloudstack.storage.datastore.db.VolumeDataStoreVO;
import org.apache.cloudstack.storage.image.datastore.ImageStoreEntity;
import com.cloud.agent.AgentManager;
import com.cloud.agent.api.Answer;
import com.cloud.agent.api.to.DataTO;
import com.cloud.agent.api.to.DiskTO;
import com.cloud.agent.api.to.VirtualMachineTO;
import com.cloud.alert.AlertManager;
import com.cloud.api.ApiDBUtils;
import com.cloud.async.AsyncJobExecutor;
import com.cloud.async.AsyncJobManager;
import com.cloud.async.AsyncJobVO;
import com.cloud.async.BaseAsyncJobExecutor;
import com.cloud.capacity.CapacityManager;
import com.cloud.capacity.dao.CapacityDao;
import com.cloud.configuration.Config;
import com.cloud.configuration.ConfigurationManager;
import com.cloud.configuration.Resource.ResourceType;
import com.cloud.configuration.dao.ConfigurationDao;
import com.cloud.consoleproxy.ConsoleProxyManager;
import com.cloud.dc.ClusterVO;
import com.cloud.dc.DataCenter;
import com.cloud.dc.DataCenterVO;
import com.cloud.dc.HostPodVO;
import com.cloud.dc.dao.ClusterDao;
import com.cloud.dc.dao.DataCenterDao;
import com.cloud.dc.dao.HostPodDao;
import com.cloud.deploy.DeployDestination;
import com.cloud.domain.Domain;
import com.cloud.domain.dao.DomainDao;
import com.cloud.event.ActionEvent;
import com.cloud.event.EventTypes;
import com.cloud.event.UsageEventUtils;
import com.cloud.event.dao.EventDao;
import com.cloud.exception.ConcurrentOperationException;
import com.cloud.exception.InsufficientStorageCapacityException;
import com.cloud.exception.InvalidParameterValueException;
import com.cloud.exception.PermissionDeniedException;
import com.cloud.exception.ResourceAllocationException;
import com.cloud.exception.StorageUnavailableException;
import com.cloud.host.Host;
import com.cloud.host.HostVO;
import com.cloud.host.dao.HostDao;
import com.cloud.hypervisor.Hypervisor.HypervisorType;
import com.cloud.hypervisor.HypervisorCapabilitiesVO;
import com.cloud.hypervisor.HypervisorGuruManager;
import com.cloud.hypervisor.dao.HypervisorCapabilitiesDao;
import com.cloud.network.NetworkModel;
import com.cloud.org.Grouping;
import com.cloud.resource.ResourceManager;
import com.cloud.server.ManagementServer;
import com.cloud.service.ServiceOfferingVO;
import com.cloud.service.dao.ServiceOfferingDao;
import com.cloud.storage.Storage.ImageFormat;
import com.cloud.storage.Volume.Type;
import com.cloud.storage.dao.DiskOfferingDao;
import com.cloud.storage.dao.SnapshotDao;
import com.cloud.storage.dao.SnapshotPolicyDao;
import com.cloud.storage.dao.StoragePoolHostDao;
import com.cloud.storage.dao.StoragePoolWorkDao;
import com.cloud.storage.dao.UploadDao;
import com.cloud.storage.dao.VMTemplateDao;
import com.cloud.storage.dao.VMTemplatePoolDao;
import com.cloud.storage.dao.VolumeDao;
import com.cloud.storage.dao.VolumeDetailsDao;
import com.cloud.storage.download.DownloadMonitor;
import com.cloud.storage.secondary.SecondaryStorageVmManager;
import com.cloud.storage.snapshot.SnapshotApiService;
import com.cloud.storage.snapshot.SnapshotManager;
import com.cloud.storage.snapshot.SnapshotScheduler;
import com.cloud.storage.upload.UploadMonitor;
import com.cloud.tags.dao.ResourceTagDao;
import com.cloud.template.TemplateManager;
import com.cloud.user.Account;
import com.cloud.user.AccountManager;
import com.cloud.user.ResourceLimitService;
import com.cloud.user.UserContext;
import com.cloud.user.VmDiskStatisticsVO;
import com.cloud.user.dao.AccountDao;
import com.cloud.user.dao.UserDao;
import com.cloud.user.dao.VmDiskStatisticsDao;
import com.cloud.uservm.UserVm;
import com.cloud.utils.component.ManagerBase;
import com.cloud.utils.db.DB;
import com.cloud.utils.db.Transaction;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.utils.fsm.NoTransitionException;
import com.cloud.utils.fsm.StateMachine2;
import com.cloud.vm.DiskProfile;
import com.cloud.vm.UserVmManager;
import com.cloud.vm.UserVmVO;
import com.cloud.vm.VMInstanceVO;
import com.cloud.vm.VirtualMachine;
import com.cloud.vm.VirtualMachine.State;
import com.cloud.vm.VirtualMachineManager;
import com.cloud.vm.VirtualMachineProfile;
import com.cloud.vm.dao.ConsoleProxyDao;
import com.cloud.vm.dao.DomainRouterDao;
import com.cloud.vm.dao.SecondaryStorageVmDao;
import com.cloud.vm.dao.UserVmDao;
import com.cloud.vm.dao.VMInstanceDao;
import com.cloud.vm.snapshot.VMSnapshotVO;
import com.cloud.vm.snapshot.dao.VMSnapshotDao;
@Component
public class VolumeManagerImpl extends ManagerBase implements VolumeManager {
private static final Logger s_logger = Logger
.getLogger(VolumeManagerImpl.class);
@Inject
protected UserVmManager _userVmMgr;
@Inject
protected AgentManager _agentMgr;
@Inject
protected TemplateManager _tmpltMgr;
@Inject
protected AsyncJobManager _asyncMgr;
@Inject
protected SnapshotManager _snapshotMgr;
@Inject
protected SnapshotScheduler _snapshotScheduler;
@Inject
protected AccountManager _accountMgr;
@Inject
protected ConfigurationManager _configMgr;
@Inject
protected ConsoleProxyManager _consoleProxyMgr;
@Inject
protected SecondaryStorageVmManager _secStorageMgr;
@Inject
protected NetworkModel _networkMgr;
@Inject
protected ServiceOfferingDao _serviceOfferingDao;
@Inject
protected VolumeDao _volsDao;
@Inject
protected HostDao _hostDao;
@Inject
protected ConsoleProxyDao _consoleProxyDao;
@Inject
protected SnapshotDao _snapshotDao;
@Inject
protected SnapshotManager _snapMgr;
@Inject
protected SnapshotPolicyDao _snapshotPolicyDao;
@Inject
protected StoragePoolHostDao _storagePoolHostDao;
@Inject
StoragePoolDetailsDao storagePoolDetailsDao;
@Inject
protected AlertManager _alertMgr;
@Inject
protected TemplateDataStoreDao _vmTemplateStoreDao = null;
@Inject
protected VMTemplatePoolDao _vmTemplatePoolDao = null;
@Inject
protected VMTemplateDao _vmTemplateDao = null;
@Inject
protected StoragePoolHostDao _poolHostDao = null;
@Inject
protected UserVmDao _userVmDao;
@Inject
VolumeDataStoreDao _volumeStoreDao;
@Inject
protected VMInstanceDao _vmInstanceDao;
@Inject
protected PrimaryDataStoreDao _storagePoolDao = null;
@Inject
protected CapacityDao _capacityDao;
@Inject
protected CapacityManager _capacityMgr;
@Inject
protected DiskOfferingDao _diskOfferingDao;
@Inject
protected AccountDao _accountDao;
@Inject
protected EventDao _eventDao = null;
@Inject
protected DataCenterDao _dcDao = null;
@Inject
protected HostPodDao _podDao = null;
@Inject
protected VMTemplateDao _templateDao;
@Inject
protected ServiceOfferingDao _offeringDao;
@Inject
protected DomainDao _domainDao;
@Inject
protected UserDao _userDao;
@Inject
protected ClusterDao _clusterDao;
@Inject
protected VirtualMachineManager _vmMgr;
@Inject
protected DomainRouterDao _domrDao;
@Inject
protected SecondaryStorageVmDao _secStrgDao;
@Inject
protected StoragePoolWorkDao _storagePoolWorkDao;
@Inject
protected HypervisorGuruManager _hvGuruMgr;
@Inject
protected VolumeDao _volumeDao;
@Inject
protected OCFS2Manager _ocfs2Mgr;
@Inject
protected ResourceLimitService _resourceLimitMgr;
@Inject
protected SecondaryStorageVmManager _ssvmMgr;
@Inject
protected ResourceManager _resourceMgr;
@Inject
protected DownloadMonitor _downloadMonitor;
@Inject
protected ResourceTagDao _resourceTagDao;
@Inject
protected VmDiskStatisticsDao _vmDiskStatsDao;
@Inject
protected VMSnapshotDao _vmSnapshotDao;
@Inject
protected List<StoragePoolAllocator> _storagePoolAllocators;
@Inject
ConfigurationDao _configDao;
@Inject
VolumeDetailsDao _volDetailDao;
@Inject
ManagementServer _msServer;
@Inject
DataStoreManager dataStoreMgr;
@Inject
DataStoreProviderManager dataStoreProviderMgr;
@Inject
VolumeService volService;
@Inject
VolumeDataFactory volFactory;
@Inject
TemplateDataFactory tmplFactory;
@Inject
SnapshotDataFactory snapshotFactory;
@Inject
SnapshotApiService snapshotMgr;
@Inject
UploadMonitor _uploadMonitor;
@Inject
UploadDao _uploadDao;
private int _copyvolumewait;
@Inject
protected HypervisorCapabilitiesDao _hypervisorCapabilitiesDao;
private final StateMachine2<Volume.State, Volume.Event, Volume> _volStateMachine;
@Inject
StorageManager storageMgr;
private int _customDiskOfferingMinSize = 1;
private final int _customDiskOfferingMaxSize = 1024;
private long _maxVolumeSizeInGb;
private boolean _recreateSystemVmEnabled;
public VolumeManagerImpl() {
_volStateMachine = Volume.State.getStateMachine();
}
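    /**
     * Finds a storage pool matching the given zone/pod/cluster and the volume's
     * disk offering, then migrates the volume to it.
     */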
@Override
public VolumeInfo moveVolume(VolumeInfo volume, long destPoolDcId,
Long destPoolPodId, Long destPoolClusterId,
HypervisorType dataDiskHyperType)
throws ConcurrentOperationException {
// Find a destination storage pool with the specified criteria
DiskOfferingVO diskOffering = _diskOfferingDao.findById(volume
.getDiskOfferingId());
DiskProfile dskCh = new DiskProfile(volume.getId(),
volume.getVolumeType(), volume.getName(), diskOffering.getId(),
diskOffering.getDiskSize(), diskOffering.getTagsArray(),
diskOffering.getUseLocalStorage(),
diskOffering.isRecreatable(), null);
dskCh.setHyperType(dataDiskHyperType);
DataCenterVO destPoolDataCenter = _dcDao.findById(destPoolDcId);
HostPodVO destPoolPod = _podDao.findById(destPoolPodId);
StoragePool destPool = storageMgr.findStoragePool(dskCh,
destPoolDataCenter, destPoolPod, destPoolClusterId, null, null,
new HashSet<StoragePool>());
if (destPool == null) {
throw new CloudRuntimeException(
"Failed to find a storage pool with enough capacity to move the volume to.");
}
Volume newVol = migrateVolume(volume, destPool);
return volFactory.getVolume(newVol.getId());
}
/*
* Upload the volume to secondary storage.
*/
@Override
@DB
@ActionEvent(eventType = EventTypes.EVENT_VOLUME_UPLOAD, eventDescription = "uploading volume", async = true)
public VolumeVO uploadVolume(UploadVolumeCmd cmd)
throws ResourceAllocationException {
Account caller = UserContext.current().getCaller();
long ownerId = cmd.getEntityOwnerId();
Account owner = _accountDao.findById(ownerId);
Long zoneId = cmd.getZoneId();
String volumeName = cmd.getVolumeName();
String url = cmd.getUrl();
String format = cmd.getFormat();
String imageStoreUuid = cmd.getImageStoreUuid();
DataStore store = _tmpltMgr.getImageStore(imageStoreUuid, zoneId);
validateVolume(caller, ownerId, zoneId, volumeName, url, format);
VolumeVO volume = persistVolume(owner, zoneId, volumeName,
url, cmd.getFormat());
VolumeInfo vol = volFactory.getVolume(volume.getId());
RegisterVolumePayload payload = new RegisterVolumePayload(cmd.getUrl(), cmd.getChecksum(),
cmd.getFormat());
vol.addPayload(payload);
volService.registerVolume(vol, store);
return volume;
}
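    /**
     * Performs the permission, resource-limit, zone, format and URL checks for an
     * uploaded volume before it is persisted and registered.
     */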
private boolean validateVolume(Account caller, long ownerId, Long zoneId,
String volumeName, String url, String format)
throws ResourceAllocationException {
// permission check
_accountMgr.checkAccess(caller, null, true,
_accountMgr.getActiveAccountById(ownerId));
// Check that the resource limit for volumes won't be exceeded
_resourceLimitMgr.checkResourceLimit(_accountMgr.getAccount(ownerId),
ResourceType.volume);
// Verify that zone exists
DataCenterVO zone = _dcDao.findById(zoneId);
if (zone == null) {
throw new InvalidParameterValueException(
"Unable to find zone by id " + zoneId);
}
// Check if zone is disabled
if (Grouping.AllocationState.Disabled == zone.getAllocationState()
&& !_accountMgr.isRootAdmin(caller.getType())) {
throw new PermissionDeniedException(
"Cannot perform this operation, Zone is currently disabled: "
+ zoneId);
}
if (url.toLowerCase().contains("file://")) {
throw new InvalidParameterValueException(
"File:// type urls are currently unsupported");
}
ImageFormat imgfmt = ImageFormat.valueOf(format.toUpperCase());
if (imgfmt == null) {
throw new IllegalArgumentException("Image format is incorrect "
+ format + ". Supported formats are "
+ EnumUtils.listValues(ImageFormat.values()));
}
String userSpecifiedName = volumeName;
if (userSpecifiedName == null) {
userSpecifiedName = getRandomVolumeName();
}
if ((!url.toLowerCase().endsWith("vhd"))
&& (!url.toLowerCase().endsWith("vhd.zip"))
&& (!url.toLowerCase().endsWith("vhd.bz2"))
&& (!url.toLowerCase().endsWith("vhd.gz"))
&& (!url.toLowerCase().endsWith("qcow2"))
&& (!url.toLowerCase().endsWith("qcow2.zip"))
&& (!url.toLowerCase().endsWith("qcow2.bz2"))
&& (!url.toLowerCase().endsWith("qcow2.gz"))
&& (!url.toLowerCase().endsWith("ova"))
&& (!url.toLowerCase().endsWith("ova.zip"))
&& (!url.toLowerCase().endsWith("ova.bz2"))
&& (!url.toLowerCase().endsWith("ova.gz"))
&& (!url.toLowerCase().endsWith("img"))
&& (!url.toLowerCase().endsWith("raw"))) {
throw new InvalidParameterValueException("Please specify a valid "
+ format.toLowerCase());
}
if ((format.equalsIgnoreCase("vhd") && (!url.toLowerCase().endsWith(
".vhd")
&& !url.toLowerCase().endsWith("vhd.zip")
&& !url.toLowerCase().endsWith("vhd.bz2") && !url.toLowerCase()
.endsWith("vhd.gz")))
|| (format.equalsIgnoreCase("qcow2") && (!url.toLowerCase()
.endsWith(".qcow2")
&& !url.toLowerCase().endsWith("qcow2.zip")
&& !url.toLowerCase().endsWith("qcow2.bz2") && !url
.toLowerCase().endsWith("qcow2.gz")))
|| (format.equalsIgnoreCase("ova") && (!url.toLowerCase()
.endsWith(".ova")
&& !url.toLowerCase().endsWith("ova.zip")
&& !url.toLowerCase().endsWith("ova.bz2") && !url
.toLowerCase().endsWith("ova.gz")))
|| (format.equalsIgnoreCase("raw") && (!url.toLowerCase()
.endsWith(".img") && !url.toLowerCase().endsWith("raw")))) {
throw new InvalidParameterValueException(
"Please specify a valid URL. URL:" + url
+ " is an invalid for the format "
+ format.toLowerCase());
}
UriUtils.validateUrl(url);
// Check that the resource limit for secondary storage won't be exceeded
_resourceLimitMgr.checkResourceLimit(_accountMgr.getAccount(ownerId), ResourceType.secondary_storage,
UriUtils.getRemoteSize(url));
return false;
}
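    /**
     * Persists a copy of the given volume record, optionally pointing it at a
     * different template.
     */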
@Override
public VolumeVO allocateDuplicateVolume(VolumeVO oldVol, Long templateId) {
VolumeVO newVol = new VolumeVO(oldVol.getVolumeType(),
oldVol.getName(), oldVol.getDataCenterId(),
oldVol.getDomainId(), oldVol.getAccountId(),
oldVol.getDiskOfferingId(), oldVol.getSize(),
oldVol.getMinIops(), oldVol.getMaxIops(), oldVol.get_iScsiName());
if (templateId != null) {
newVol.setTemplateId(templateId);
} else {
newVol.setTemplateId(oldVol.getTemplateId());
}
newVol.setDeviceId(oldVol.getDeviceId());
newVol.setInstanceId(oldVol.getInstanceId());
newVol.setRecreatable(oldVol.isRecreatable());
newVol.setFormat(oldVol.getFormat());
return _volsDao.persist(newVol);
}
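    /**
     * Finds a pod and primary storage pool with capacity for the volume and
     * creates the volume there from the given snapshot.
     */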
@DB
protected VolumeInfo createVolumeFromSnapshot(VolumeVO volume,
SnapshotVO snapshot) throws StorageUnavailableException {
Account account = _accountDao.findById(volume.getAccountId());
final HashSet<StoragePool> poolsToAvoid = new HashSet<StoragePool>();
StoragePool pool = null;
Set<Long> podsToAvoid = new HashSet<Long>();
Pair<HostPodVO, Long> pod = null;
DiskOfferingVO diskOffering = _diskOfferingDao
.findByIdIncludingRemoved(volume.getDiskOfferingId());
DataCenterVO dc = _dcDao.findById(volume.getDataCenterId());
DiskProfile dskCh = new DiskProfile(volume, diskOffering,
snapshot.getHypervisorType());
// Determine what pod to store the volume in
while ((pod = _resourceMgr.findPod(null, null, dc, account.getId(),
podsToAvoid)) != null) {
podsToAvoid.add(pod.first().getId());
// Determine what storage pool to store the volume in
while ((pool = storageMgr.findStoragePool(dskCh, dc, pod.first(), null, null,
null, poolsToAvoid)) != null) {
break;
}
}
if (pool == null) {
String msg = "There are no available storage pools to store the volume in";
s_logger.info(msg);
throw new StorageUnavailableException(msg, -1);
}
VolumeInfo vol = volFactory.getVolume(volume.getId());
DataStore store = dataStoreMgr.getDataStore(pool.getId(), DataStoreRole.Primary);
SnapshotInfo snapInfo = snapshotFactory.getSnapshot(snapshot.getId(), DataStoreRole.Image);
AsyncCallFuture<VolumeApiResult> future = volService.createVolumeFromSnapshot(vol, store, snapInfo);
try {
VolumeApiResult result = future.get();
if (result.isFailed()) {
s_logger.debug("Failed to create volume from snapshot:" + result.getResult());
throw new CloudRuntimeException("Failed to create volume from snapshot:" + result.getResult());
}
return result.getVolume();
} catch (InterruptedException e) {
s_logger.debug("Failed to create volume from snapshot", e);
throw new CloudRuntimeException("Failed to create volume from snapshot", e);
} catch (ExecutionException e) {
s_logger.debug("Failed to create volume from snapshot", e);
throw new CloudRuntimeException("Failed to create volume from snapshot", e);
}
}
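    /**
     * Builds the DiskProfile for the volume: ROOT volumes created from a non-ISO
     * template are sized from the downloaded template, all others from the disk
     * offering.
     */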
protected DiskProfile createDiskCharacteristics(VolumeInfo volume,
VMTemplateVO template, DataCenterVO dc, DiskOfferingVO diskOffering) {
if (volume.getVolumeType() == Type.ROOT
&& Storage.ImageFormat.ISO != template.getFormat()) {
TemplateDataStoreVO ss = _vmTemplateStoreDao.findByTemplateZoneDownloadStatus(template.getId(), dc.getId(),
VMTemplateStorageResourceAssoc.Status.DOWNLOADED);
if (ss == null) {
throw new CloudRuntimeException("Template "
+ template.getName()
+ " has not been completely downloaded to zone "
+ dc.getId());
}
return new DiskProfile(volume.getId(), volume.getVolumeType(),
volume.getName(), diskOffering.getId(), ss.getSize(),
diskOffering.getTagsArray(),
diskOffering.getUseLocalStorage(),
diskOffering.isRecreatable(),
Storage.ImageFormat.ISO != template.getFormat() ? template
.getId() : null);
} else {
return new DiskProfile(volume.getId(), volume.getVolumeType(),
volume.getName(), diskOffering.getId(),
diskOffering.getDiskSize(), diskOffering.getTagsArray(),
diskOffering.getUseLocalStorage(),
diskOffering.isRecreatable(), null);
}
}
protected VolumeVO createVolumeFromSnapshot(VolumeVO volume, long snapshotId) throws StorageUnavailableException {
VolumeInfo createdVolume = null;
SnapshotVO snapshot = _snapshotDao.findById(snapshotId);
createdVolume = createVolumeFromSnapshot(volume,
snapshot);
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, createdVolume.getAccountId(), createdVolume.getDataCenterId(), createdVolume.getId(),
createdVolume.getName(), createdVolume.getDiskOfferingId(), null, createdVolume.getSize(), Volume.class.getName(), createdVolume.getUuid());
return _volsDao.findById(createdVolume.getId());
}
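    /**
     * Copies a volume from secondary storage to a suitable primary storage pool
     * for the given VM and deployment constraints.
     */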
@DB
public VolumeInfo copyVolumeFromSecToPrimary(VolumeInfo volume,
VMInstanceVO vm, VMTemplateVO template, DataCenterVO dc,
HostPodVO pod, Long clusterId, ServiceOfferingVO offering,
DiskOfferingVO diskOffering, List<StoragePool> avoids,
long size, HypervisorType hyperType) throws NoTransitionException {
final HashSet<StoragePool> avoidPools = new HashSet<StoragePool>(
avoids);
DiskProfile dskCh = createDiskCharacteristics(volume, template, dc,
diskOffering);
dskCh.setHyperType(vm.getHypervisorType());
// Find a suitable storage to create volume on
StoragePool destPool = storageMgr.findStoragePool(dskCh, dc, pod,
clusterId, null, vm, avoidPools);
DataStore destStore = dataStoreMgr.getDataStore(destPool.getId(), DataStoreRole.Primary);
AsyncCallFuture<VolumeApiResult> future = volService.copyVolume(volume, destStore);
try {
VolumeApiResult result = future.get();
if (result.isFailed()) {
s_logger.debug("copy volume failed: " + result.getResult());
throw new CloudRuntimeException("copy volume failed: " + result.getResult());
}
return result.getVolume();
} catch (InterruptedException e) {
s_logger.debug("Failed to copy volume: " + volume.getId(), e);
throw new CloudRuntimeException("Failed to copy volume", e);
} catch (ExecutionException e) {
s_logger.debug("Failed to copy volume: " + volume.getId(), e);
throw new CloudRuntimeException("Failed to copy volume", e);
}
}
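    /**
     * Creates the volume on a primary storage pool chosen by the allocators,
     * either from scratch or from the VM's template.
     */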
@DB
public VolumeInfo createVolume(VolumeInfo volume, VMInstanceVO vm,
VMTemplateVO template, DataCenterVO dc, HostPodVO pod,
Long clusterId, ServiceOfferingVO offering,
DiskOfferingVO diskOffering, List<StoragePool> avoids,
long size, HypervisorType hyperType) {
StoragePool pool = null;
if (diskOffering != null && diskOffering.isCustomized()) {
diskOffering.setDiskSize(size);
}
DiskProfile dskCh = null;
if (volume.getVolumeType() == Type.ROOT
&& Storage.ImageFormat.ISO != template.getFormat()) {
dskCh = createDiskCharacteristics(volume, template, dc, offering);
} else {
dskCh = createDiskCharacteristics(volume, template, dc,
diskOffering);
}
dskCh.setHyperType(hyperType);
final HashSet<StoragePool> avoidPools = new HashSet<StoragePool>(
avoids);
pool = storageMgr.findStoragePool(dskCh, dc, pod, clusterId, vm.getHostId(),
vm, avoidPools);
if (pool == null) {
s_logger.warn("Unable to find storage pool when create volume "
+ volume.getName());
throw new CloudRuntimeException("Unable to find storage pool when create volume" + volume.getName());
}
if (s_logger.isDebugEnabled()) {
s_logger.debug("Trying to create " + volume + " on " + pool);
}
DataStore store = dataStoreMgr.getDataStore(pool.getId(), DataStoreRole.Primary);
AsyncCallFuture<VolumeApiResult> future = null;
        boolean isNotCreatedFromTemplate = volume.getTemplateId() == null;
if (isNotCreatedFromTemplate) {
future = volService.createVolumeAsync(volume, store);
} else {
TemplateInfo templ = tmplFactory.getTemplate(template.getId(), DataStoreRole.Image);
future = volService.createVolumeFromTemplateAsync(volume, store.getId(), templ);
}
try {
VolumeApiResult result = future.get();
if (result.isFailed()) {
s_logger.debug("create volume failed: " + result.getResult());
throw new CloudRuntimeException("create volume failed:" + result.getResult());
}
return result.getVolume();
} catch (InterruptedException e) {
s_logger.error("create volume failed", e);
throw new CloudRuntimeException("create volume failed", e);
} catch (ExecutionException e) {
s_logger.error("create volume failed", e);
throw new CloudRuntimeException("create volume failed", e);
}
}
public String getRandomVolumeName() {
return UUID.randomUUID().toString();
}
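    /**
     * Persists the database record for an uploaded volume and increments the
     * owner's volume and secondary storage resource counts.
     */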
private VolumeVO persistVolume(Account owner, Long zoneId,
String volumeName, String url, String format) {
Transaction txn = Transaction.currentTxn();
txn.start();
VolumeVO volume = new VolumeVO(volumeName, zoneId, -1, -1, -1,
new Long(-1), null, null, 0, Volume.Type.DATADISK);
volume.setPoolId(null);
volume.setDataCenterId(zoneId);
volume.setPodId(null);
volume.setAccountId(owner.getAccountId());
volume.setDomainId(owner.getDomainId());
long diskOfferingId = _diskOfferingDao.findByUniqueName(
"Cloud.com-Custom").getId();
volume.setDiskOfferingId(diskOfferingId);
// volume.setSize(size);
volume.setInstanceId(null);
volume.setUpdated(new Date());
volume.setDomainId((owner == null) ? Domain.ROOT_DOMAIN : owner
.getDomainId());
volume.setFormat(ImageFormat.valueOf(format));
volume = _volsDao.persist(volume);
UserContext.current().setEventDetails("Volume Id: " + volume.getId());
// Increment resource count during allocation; if actual creation fails,
// decrement it
_resourceLimitMgr.incrementResourceCount(volume.getAccountId(),
ResourceType.volume);
_resourceLimitMgr.incrementResourceCount(volume.getAccountId(), ResourceType.secondary_storage,
UriUtils.getRemoteSize(url));
txn.commit();
return volume;
}
@Override
public boolean volumeOnSharedStoragePool(VolumeVO volume) {
Long poolId = volume.getPoolId();
if (poolId == null) {
return false;
} else {
StoragePoolVO pool = _storagePoolDao.findById(poolId);
if (pool == null) {
return false;
} else {
                return pool.getScope() != ScopeType.HOST;
}
}
}
@Override
public boolean volumeInactive(Volume volume) {
Long vmId = volume.getInstanceId();
if (vmId != null) {
UserVm vm = _userVmDao.findById(vmId);
if (vm == null) {
return true;
}
State state = vm.getState();
if (state.equals(State.Stopped) || state.equals(State.Destroyed)) {
return true;
}
}
return false;
}
@Override
public String getVmNameOnVolume(Volume volume) {
Long vmId = volume.getInstanceId();
if (vmId != null) {
VMInstanceVO vm = _vmInstanceDao.findById(vmId);
if (vm == null) {
return null;
}
return vm.getInstanceName();
}
return null;
}
/*
* Just allocate a volume in the database, don't send the createvolume cmd
* to hypervisor. The volume will be finally created only when it's attached
* to a VM.
*/
@Override
@DB
@ActionEvent(eventType = EventTypes.EVENT_VOLUME_CREATE, eventDescription = "creating volume", create = true)
public VolumeVO allocVolume(CreateVolumeCmd cmd)
throws ResourceAllocationException {
// FIXME: some of the scheduled event stuff might be missing here...
Account caller = UserContext.current().getCaller();
long ownerId = cmd.getEntityOwnerId();
Boolean displayVolumeEnabled = cmd.getDisplayVolume();
// permission check
_accountMgr.checkAccess(caller, null, true,
_accountMgr.getActiveAccountById(ownerId));
// Check that the resource limit for volumes won't be exceeded
_resourceLimitMgr.checkResourceLimit(_accountMgr.getAccount(ownerId),
ResourceType.volume);
Long zoneId = cmd.getZoneId();
Long diskOfferingId = null;
DiskOfferingVO diskOffering = null;
Long size = null;
Long minIops = null;
Long maxIops = null;
// Volume VO used for extracting the source template id
VolumeVO parentVolume = null;
// validate input parameters before creating the volume
if ((cmd.getSnapshotId() == null && cmd.getDiskOfferingId() == null)
|| (cmd.getSnapshotId() != null && cmd.getDiskOfferingId() != null)) {
            throw new InvalidParameterValueException(
                    "Either a disk offering id or a snapshot id must be passed when creating a volume");
}
if (cmd.getSnapshotId() == null) {// create a new volume
diskOfferingId = cmd.getDiskOfferingId();
size = cmd.getSize();
Long sizeInGB = size;
if (size != null) {
if (size > 0) {
size = size * 1024 * 1024 * 1024; // user specify size in GB
} else {
throw new InvalidParameterValueException(
"Disk size must be larger than 0");
}
}
            // Check that the disk offering is specified
diskOffering = _diskOfferingDao.findById(diskOfferingId);
if ((diskOffering == null) || diskOffering.getRemoved() != null
|| !DiskOfferingVO.Type.Disk.equals(diskOffering.getType())) {
throw new InvalidParameterValueException(
"Please specify a valid disk offering.");
}
if (diskOffering.isCustomized()) {
if (size == null) {
throw new InvalidParameterValueException(
"This disk offering requires a custom size specified");
}
if ((sizeInGB < _customDiskOfferingMinSize)
|| (sizeInGB > _customDiskOfferingMaxSize)) {
throw new InvalidParameterValueException("Volume size: "
+ sizeInGB + "GB is out of allowed range. Max: "
+ _customDiskOfferingMaxSize + " Min:"
+ _customDiskOfferingMinSize);
}
}
if (!diskOffering.isCustomized() && size != null) {
throw new InvalidParameterValueException(
"This disk offering does not allow custom size");
}
if (diskOffering.getDomainId() == null) {
// do nothing as offering is public
} else {
_configMgr.checkDiskOfferingAccess(caller, diskOffering);
}
if (diskOffering.getDiskSize() > 0) {
size = diskOffering.getDiskSize();
}
Boolean isCustomizedIops = diskOffering.isCustomizedIops();
if (isCustomizedIops != null) {
if (isCustomizedIops) {
minIops = cmd.getMinIops();
maxIops = cmd.getMaxIops();
if (minIops == null && maxIops == null) {
minIops = 0L;
maxIops = 0L;
}
else {
if (minIops == null || minIops <= 0) {
throw new InvalidParameterValueException("The min IOPS must be greater than 0.");
}
if (maxIops == null) {
maxIops = 0L;
}
if (minIops > maxIops) {
throw new InvalidParameterValueException("The min IOPS must be less than or equal to the max IOPS.");
}
}
}
else {
minIops = diskOffering.getMinIops();
maxIops = diskOffering.getMaxIops();
}
}
            if (!validateVolumeSizeRange(size)) {// size is in bytes at this point;
                // validate it against the configured maximum volume size
throw new InvalidParameterValueException(
"Invalid size for custom volume creation: " + size
+ " ,max volume size is:" + _maxVolumeSizeInGb);
}
} else { // create volume from snapshot
Long snapshotId = cmd.getSnapshotId();
SnapshotVO snapshotCheck = _snapshotDao.findById(snapshotId);
if (snapshotCheck == null) {
throw new InvalidParameterValueException(
"unable to find a snapshot with id " + snapshotId);
}
if (snapshotCheck.getState() != Snapshot.State.BackedUp) {
throw new InvalidParameterValueException("Snapshot id="
+ snapshotId + " is not in " + Snapshot.State.BackedUp
+ " state yet and can't be used for volume creation");
}
parentVolume = _volsDao.findByIdIncludingRemoved(snapshotCheck.getVolumeId());
diskOfferingId = snapshotCheck.getDiskOfferingId();
diskOffering = _diskOfferingDao.findById(diskOfferingId);
zoneId = snapshotCheck.getDataCenterId();
size = snapshotCheck.getSize(); // ; disk offering is used for tags
// purposes
// check snapshot permissions
_accountMgr.checkAccess(caller, null, true, snapshotCheck);
}
if(displayVolumeEnabled == null){
displayVolumeEnabled = true;
} else{
if(!_accountMgr.isRootAdmin(caller.getType())){
throw new PermissionDeniedException( "Cannot update parameter displayvolume, only admin permitted ");
}
}
// Check that the resource limit for primary storage won't be exceeded
_resourceLimitMgr.checkResourceLimit(_accountMgr.getAccount(ownerId), ResourceType.primary_storage,
new Long(size));
// Verify that zone exists
DataCenterVO zone = _dcDao.findById(zoneId);
if (zone == null) {
throw new InvalidParameterValueException(
"Unable to find zone by id " + zoneId);
}
// Check if zone is disabled
if (Grouping.AllocationState.Disabled == zone.getAllocationState()
&& !_accountMgr.isRootAdmin(caller.getType())) {
throw new PermissionDeniedException(
"Cannot perform this operation, Zone is currently disabled: "
+ zoneId);
}
// If local storage is disabled then creation of volume with local disk
// offering not allowed
if (!zone.isLocalStorageEnabled() && diskOffering.getUseLocalStorage()) {
throw new InvalidParameterValueException(
"Zone is not configured to use local storage but volume's disk offering "
+ diskOffering.getName() + " uses it");
}
String userSpecifiedName = cmd.getVolumeName();
if (userSpecifiedName == null) {
userSpecifiedName = getRandomVolumeName();
}
Transaction txn = Transaction.currentTxn();
txn.start();
VolumeVO volume = new VolumeVO(userSpecifiedName, -1, -1, -1, -1,
new Long(-1), null, null, 0, Volume.Type.DATADISK);
volume.setPoolId(null);
volume.setDataCenterId(zoneId);
volume.setPodId(null);
volume.setAccountId(ownerId);
volume.setDomainId(((caller == null) ? Domain.ROOT_DOMAIN : caller
.getDomainId()));
volume.setDiskOfferingId(diskOfferingId);
volume.setSize(size);
volume.setMinIops(minIops);
volume.setMaxIops(maxIops);
volume.setInstanceId(null);
volume.setUpdated(new Date());
volume.setDomainId((caller == null) ? Domain.ROOT_DOMAIN : caller
.getDomainId());
volume.setDisplayVolume(displayVolumeEnabled);
if (parentVolume != null) {
volume.setTemplateId(parentVolume.getTemplateId());
volume.setFormat(parentVolume.getFormat());
} else {
volume.setTemplateId(null);
}
volume = _volsDao.persist(volume);
if (cmd.getSnapshotId() == null) {
// for volume created from snapshot, create usage event after volume creation
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(), diskOfferingId,
null, size, Volume.class.getName(), volume.getUuid());
}
UserContext.current().setEventDetails("Volume Id: " + volume.getId());
// Increment resource count during allocation; if actual creation fails,
// decrement it
_resourceLimitMgr.incrementResourceCount(volume.getAccountId(),
ResourceType.volume);
_resourceLimitMgr.incrementResourceCount(volume.getAccountId(), ResourceType.primary_storage,
new Long(volume.getSize()));
txn.commit();
return volume;
}
@Override
@DB
@ActionEvent(eventType = EventTypes.EVENT_VOLUME_CREATE, eventDescription = "creating volume", async = true)
public VolumeVO createVolume(CreateVolumeCmd cmd) {
VolumeVO volume = _volsDao.findById(cmd.getEntityId());
boolean created = true;
try {
if (cmd.getSnapshotId() != null) {
volume = createVolumeFromSnapshot(volume, cmd.getSnapshotId());
if (volume.getState() != Volume.State.Ready) {
created = false;
}
}
return volume;
} catch(Exception e) {
created = false;
s_logger.debug("Failed to create volume: " + volume.getId(), e);
return null;
} finally {
if (!created) {
s_logger.trace("Decrementing volume resource count for account id="
+ volume.getAccountId()
+ " as volume failed to create on the backend");
_resourceLimitMgr.decrementResourceCount(volume.getAccountId(),
ResourceType.volume);
_resourceLimitMgr.decrementResourceCount(volume.getAccountId(), ResourceType.primary_storage,
new Long(volume.getSize()));
}
}
}
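    /**
     * Resizes a DATA volume, either to an explicit size (custom offerings) or to
     * the size of a new disk offering, updating resource counts accordingly.
     */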
@Override
@DB
@ActionEvent(eventType = EventTypes.EVENT_VOLUME_RESIZE, eventDescription = "resizing volume", async = true)
public VolumeVO resizeVolume(ResizeVolumeCmd cmd)
throws ResourceAllocationException {
Long newSize = null;
boolean shrinkOk = cmd.getShrinkOk();
VolumeVO volume = _volsDao.findById(cmd.getEntityId());
if (volume == null) {
throw new InvalidParameterValueException("No such volume");
}
DiskOfferingVO diskOffering = _diskOfferingDao.findById(volume
.getDiskOfferingId());
DiskOfferingVO newDiskOffering = null;
newDiskOffering = _diskOfferingDao.findById(cmd.getNewDiskOfferingId());
/*
* Volumes with no hypervisor have never been assigned, and can be
* resized by recreating. perhaps in the future we can just update the
* db entry for the volume
*/
if (_volsDao.getHypervisorType(volume.getId()) == HypervisorType.None) {
throw new InvalidParameterValueException(
"Can't resize a volume that has never been attached, not sure which hypervisor type. Recreate volume to resize.");
}
/* Only works for KVM/Xen for now */
if (_volsDao.getHypervisorType(volume.getId()) != HypervisorType.KVM
&& _volsDao.getHypervisorType(volume.getId()) != HypervisorType.XenServer
&& _volsDao.getHypervisorType(volume.getId()) != HypervisorType.VMware) {
            throw new InvalidParameterValueException(
                    "CloudStack currently only supports volumes on KVM, XenServer or VMware hypervisors for resize");
}
if (volume.getState() != Volume.State.Ready) {
throw new InvalidParameterValueException(
"Volume should be in ready state before attempting a resize");
}
if (!volume.getVolumeType().equals(Volume.Type.DATADISK)) {
throw new InvalidParameterValueException(
"Can only resize DATA volumes");
}
/*
* figure out whether or not a new disk offering or size parameter is
* required, get the correct size value
*/
if (newDiskOffering == null) {
if (diskOffering.isCustomized()) {
newSize = cmd.getSize();
if (newSize == null) {
throw new InvalidParameterValueException(
"new offering is of custom size, need to specify a size");
}
newSize = (newSize << 30);
} else {
throw new InvalidParameterValueException("current offering"
+ volume.getDiskOfferingId()
+ " cannot be resized, need to specify a disk offering");
}
} else {
if (newDiskOffering.getRemoved() != null
|| !DiskOfferingVO.Type.Disk.equals(newDiskOffering
.getType())) {
throw new InvalidParameterValueException(
"Disk offering ID is missing or invalid");
}
if (diskOffering.getTags() != null) {
if (!newDiskOffering.getTags().equals(diskOffering.getTags())) {
throw new InvalidParameterValueException(
"Tags on new and old disk offerings must match");
}
} else if (newDiskOffering.getTags() != null) {
throw new InvalidParameterValueException(
"There are no tags on current disk offering, new disk offering needs to have no tags");
}
if (newDiskOffering.getDomainId() == null) {
// do nothing as offering is public
} else {
_configMgr.checkDiskOfferingAccess(UserContext.current()
.getCaller(), newDiskOffering);
}
if (newDiskOffering.isCustomized()) {
newSize = cmd.getSize();
if (newSize == null) {
throw new InvalidParameterValueException(
"new offering is of custom size, need to specify a size");
}
newSize = (newSize << 30);
} else {
newSize = newDiskOffering.getDiskSize();
}
}
if (newSize == null) {
throw new InvalidParameterValueException(
"could not detect a size parameter or fetch one from the diskofferingid parameter");
}
if (!validateVolumeSizeRange(newSize)) {
throw new InvalidParameterValueException(
"Requested size out of range");
}
/* does the caller have the authority to act on this volume? */
_accountMgr.checkAccess(UserContext.current().getCaller(), null, true,
volume);
UserVmVO userVm = _userVmDao.findById(volume.getInstanceId());
long currentSize = volume.getSize();
/*
* lets make certain they (think they) know what they're doing if they
* want to shrink, by forcing them to provide the shrinkok parameter.
* This will be checked again at the hypervisor level where we can see
* the actual disk size
*/
if (currentSize > newSize && !shrinkOk) {
throw new InvalidParameterValueException(
"Going from existing size of "
+ currentSize
+ " to size of "
+ newSize
+ " would shrink the volume, need to sign off by supplying the shrinkok parameter with value of true");
}
if (!shrinkOk) {
/* Check resource limit for this account on primary storage resource */
_resourceLimitMgr.checkResourceLimit(_accountMgr.getAccount(volume.getAccountId()),
ResourceType.primary_storage, new Long(newSize - currentSize));
}
/*
* get a list of hosts to send the commands to, try the system the
* associated vm is running on first, then the last known place it ran.
* If not attached to a userVm, we pass 'none' and resizevolume.sh is ok
* with that since it only needs the vm name to live resize
*/
long[] hosts = null;
String instanceName = "none";
if (userVm != null) {
instanceName = userVm.getInstanceName();
if (userVm.getHostId() != null) {
hosts = new long[] { userVm.getHostId() };
} else if (userVm.getLastHostId() != null) {
hosts = new long[] { userVm.getLastHostId() };
}
/* Xen only works offline, SR does not support VDI.resizeOnline */
if (_volsDao.getHypervisorType(volume.getId()) == HypervisorType.XenServer
&& !userVm.getState().equals(State.Stopped)) {
throw new InvalidParameterValueException(
"VM must be stopped or disk detached in order to resize with the Xen HV");
}
}
ResizeVolumePayload payload = new ResizeVolumePayload(newSize, shrinkOk, instanceName, hosts);
try {
VolumeInfo vol = volFactory.getVolume(volume.getId());
vol.addPayload(payload);
AsyncCallFuture<VolumeApiResult> future = volService.resize(vol);
VolumeApiResult result = future.get();
if (result.isFailed()) {
s_logger.warn("Failed to resize the volume " + volume);
return null;
}
volume = _volsDao.findById(volume.getId());
if (newDiskOffering != null) {
volume.setDiskOfferingId(cmd.getNewDiskOfferingId());
}
_volsDao.update(volume.getId(), volume);
// Log usage event for volumes belonging user VM's only
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_RESIZE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(),
volume.getDiskOfferingId(), volume.getTemplateId(), volume.getSize(), Volume.class.getName(), volume.getUuid());
/* Update resource count for the account on primary storage resource */
if (!shrinkOk) {
_resourceLimitMgr.incrementResourceCount(volume.getAccountId(), ResourceType.primary_storage,
new Long(newSize - currentSize));
} else {
_resourceLimitMgr.decrementResourceCount(volume.getAccountId(), ResourceType.primary_storage,
new Long(currentSize - newSize));
}
return volume;
        } catch (InterruptedException e) {
            s_logger.warn("failed to get resize volume result", e);
        } catch (ExecutionException e) {
            s_logger.warn("failed to get resize volume result", e);
        } catch (Exception e) {
            s_logger.warn("failed to get resize volume result", e);
        }
return null;
}
@Override
@DB
@ActionEvent(eventType = EventTypes.EVENT_VOLUME_DELETE, eventDescription = "deleting volume")
public boolean deleteVolume(long volumeId, Account caller)
throws ConcurrentOperationException {
VolumeVO volume = _volsDao.findById(volumeId);
if (volume == null) {
throw new InvalidParameterValueException(
"Unable to aquire volume with ID: " + volumeId);
}
if (!_snapshotMgr.canOperateOnVolume(volume)) {
throw new InvalidParameterValueException(
"There are snapshot creating on it, Unable to delete the volume");
}
_accountMgr.checkAccess(caller, null, true, volume);
if (volume.getInstanceId() != null) {
throw new InvalidParameterValueException(
"Please specify a volume that is not attached to any VM.");
}
if (volume.getState() == Volume.State.UploadOp) {
VolumeDataStoreVO volumeStore = _volumeStoreDao.findByVolume(volume
.getId());
if (volumeStore.getDownloadState() == VMTemplateStorageResourceAssoc.Status.DOWNLOAD_IN_PROGRESS) {
throw new InvalidParameterValueException(
"Please specify a volume that is not uploading");
}
}
try {
            if (volume.getState() != Volume.State.Destroy && volume.getState() != Volume.State.Expunged && volume.getState() != Volume.State.Expunging) {
Long instanceId = volume.getInstanceId();
if (!volService.destroyVolume(volume.getId())) {
return false;
}
VMInstanceVO vmInstance = _vmInstanceDao.findById(instanceId);
if (instanceId == null
|| (vmInstance.getType().equals(VirtualMachine.Type.User))) {
// Decrement the resource count for volumes and primary storage belonging user VM's only
_resourceLimitMgr.decrementResourceCount(volume.getAccountId(),
ResourceType.volume);
/* If volume is in primary storage, decrement primary storage count else decrement secondary
storage count (in case of upload volume). */
if (volume.getFolder() != null || volume.getPath() != null || volume.getState() == Volume.State.Allocated) {
_resourceLimitMgr.decrementResourceCount(volume.getAccountId(), ResourceType.primary_storage,
new Long(volume.getSize()));
} else {
_resourceLimitMgr.recalculateResourceCount(volume.getAccountId(), volume.getDomainId(),
ResourceType.secondary_storage.getOrdinal());
}
// Log usage event for volumes belonging user VM's only
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_DELETE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(),
Volume.class.getName(), volume.getUuid());
}
}
// Mark volume as removed if volume has not been created on primary or secondary
if (volume.getState() == Volume.State.Allocated) {
_volsDao.remove(volumeId);
stateTransitTo(volume, Volume.Event.DestroyRequested);
return true;
}
// expunge volume from primary if volume is on primary
VolumeInfo volOnPrimary = volFactory.getVolume(volume.getId(), DataStoreRole.Primary);
if (volOnPrimary != null) {
s_logger.info("Expunging volume " + volume.getId() + " from primary data store");
AsyncCallFuture<VolumeApiResult> future = volService.expungeVolumeAsync(volOnPrimary);
future.get();
}
// expunge volume from secondary if volume is on image store
VolumeInfo volOnSecondary = volFactory.getVolume(volume.getId(), DataStoreRole.Image);
if (volOnSecondary != null) {
s_logger.info("Expunging volume " + volume.getId() + " from secondary data store");
AsyncCallFuture<VolumeApiResult> future2 = volService.expungeVolumeAsync(volOnSecondary);
future2.get();
}
} catch (Exception e) {
s_logger.warn("Failed to expunge volume:", e);
return false;
}
return true;
}
@Override
public boolean validateVolumeSizeRange(long size) {
        if (size < 0 || (size > 0 && size < (1024 * 1024 * 1024))) {
            throw new InvalidParameterValueException(
                    "Please specify a size of at least 1 GB.");
        } else if (size > (_maxVolumeSizeInGb * 1024 * 1024 * 1024)) {
            throw new InvalidParameterValueException("Requested volume size " + size
                    + " exceeds the maximum allowed size of " + _maxVolumeSizeInGb
                    + " GB.");
}
return true;
}
protected DiskProfile toDiskProfile(VolumeVO vol, DiskOfferingVO offering) {
return new DiskProfile(vol.getId(), vol.getVolumeType(), vol.getName(),
offering.getId(), vol.getSize(), offering.getTagsArray(),
offering.getUseLocalStorage(), offering.isRecreatable(),
vol.getTemplateId());
}
@Override
public DiskProfile allocateRawVolume(Type type,
String name, DiskOfferingVO offering, Long size, VMInstanceVO vm, VMTemplateVO template, Account owner) {
Long isoId=null;
if (size == null) {
size = offering.getDiskSize();
} else {
            size = (size * 1024 * 1024 * 1024); // convert the requested size from GB to bytes
}
VolumeVO vol = new VolumeVO(type, name, vm.getDataCenterId(),
owner.getDomainId(), owner.getId(), offering.getId(), size,
offering.getMinIops(), offering.getMaxIops(), null);
if (vm != null) {
vol.setInstanceId(vm.getId());
}
if (type.equals(Type.ROOT)) {
vol.setDeviceId(0l);
} else {
vol.setDeviceId(1l);
}
if (template.getFormat() == ImageFormat.ISO) {
vol.setIsoId(template.getId());
}
vol.setFormat(getSupportedImageFormatForCluster(vm.getHypervisorType()));
vol = _volsDao.persist(vol);
// Save usage event and update resource count for user vm volumes
if (vm instanceof UserVm) {
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, vol.getAccountId(), vol.getDataCenterId(), vol.getId(), vol.getName(), offering.getId(), null, size,
Volume.class.getName(), vol.getUuid());
_resourceLimitMgr.incrementResourceCount(vm.getAccountId(),
ResourceType.volume);
_resourceLimitMgr.incrementResourceCount(vm.getAccountId(), ResourceType.primary_storage,
new Long(vol.getSize()));
}
return toDiskProfile(vol, offering);
}
@Override
public DiskProfile allocateTemplatedVolume(
Type type, String name, DiskOfferingVO offering,
VMTemplateVO template, VMInstanceVO vm, Account owner) {
assert (template.getFormat() != ImageFormat.ISO) : "ISO is not a template really....";
Long size = _tmpltMgr.getTemplateSize(template.getId(), vm.getDataCenterId());
VolumeVO vol = new VolumeVO(type, name, vm.getDataCenterId(),
owner.getDomainId(), owner.getId(), offering.getId(), size,
offering.getMinIops(), offering.getMaxIops(), null);
vol.setFormat(getSupportedImageFormatForCluster(template.getHypervisorType()));
if (vm != null) {
vol.setInstanceId(vm.getId());
}
vol.setTemplateId(template.getId());
if (type.equals(Type.ROOT)) {
vol.setDeviceId(0l);
if (!vm.getType().equals(VirtualMachine.Type.User)) {
vol.setRecreatable(true);
}
} else {
vol.setDeviceId(1l);
}
vol = _volsDao.persist(vol);
// Create event and update resource count for volumes if vm is a user vm
if (vm instanceof UserVm) {
Long offeringId = null;
if (offering.getType() == DiskOfferingVO.Type.Disk) {
offeringId = offering.getId();
}
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, vol.getAccountId(), vol.getDataCenterId(), vol.getId(), vol.getName(), offeringId, null, size,
Volume.class.getName(), vol.getUuid());
_resourceLimitMgr.incrementResourceCount(vm.getAccountId(),
ResourceType.volume);
_resourceLimitMgr.incrementResourceCount(vm.getAccountId(), ResourceType.primary_storage,
new Long(vol.getSize()));
}
return toDiskProfile(vol, offering);
}
private ImageFormat getSupportedImageFormatForCluster(HypervisorType hyperType) {
if (hyperType == HypervisorType.XenServer) {
return ImageFormat.VHD;
} else if (hyperType == HypervisorType.KVM) {
return ImageFormat.QCOW2;
} else if (hyperType == HypervisorType.VMware) {
return ImageFormat.OVA;
} else if (hyperType == HypervisorType.Ovm) {
return ImageFormat.RAW;
} else {
return null;
}
}
private VolumeInfo copyVolume(StoragePoolVO rootDiskPool
, VolumeInfo volume, VMInstanceVO vm, VMTemplateVO rootDiskTmplt, DataCenterVO dcVO,
HostPodVO pod, DiskOfferingVO diskVO, ServiceOfferingVO svo, HypervisorType rootDiskHyperType) throws NoTransitionException {
if (!volume
.getFormat()
.equals(
getSupportedImageFormatForCluster(rootDiskHyperType))) {
throw new InvalidParameterValueException(
"Failed to attach volume to VM since volumes format "
+ volume.getFormat()
.getFileExtension()
+ " is not compatible with the vm hypervisor type");
}
VolumeInfo volumeOnPrimary = copyVolumeFromSecToPrimary(volume,
vm, rootDiskTmplt, dcVO, pod,
rootDiskPool.getClusterId(), svo, diskVO,
new ArrayList<StoragePool>(),
volume.getSize(), rootDiskHyperType);
return volumeOnPrimary;
}
private VolumeInfo createVolumeOnPrimaryStorage(VMInstanceVO vm, VolumeVO rootVolumeOfVm, VolumeInfo volume, HypervisorType rootDiskHyperType) throws NoTransitionException {
VMTemplateVO rootDiskTmplt = _templateDao.findById(vm
.getTemplateId());
DataCenterVO dcVO = _dcDao.findById(vm
.getDataCenterId());
HostPodVO pod = _podDao.findById(vm.getPodIdToDeployIn());
StoragePoolVO rootDiskPool = _storagePoolDao
.findById(rootVolumeOfVm.getPoolId());
ServiceOfferingVO svo = _serviceOfferingDao.findById(vm
.getServiceOfferingId());
DiskOfferingVO diskVO = _diskOfferingDao.findById(volume
.getDiskOfferingId());
Long clusterId = (rootDiskPool == null ? null : rootDiskPool
.getClusterId());
VolumeInfo vol = null;
if (volume.getState() == Volume.State.Allocated) {
vol = createVolume(volume, vm,
rootDiskTmplt, dcVO, pod, clusterId, svo, diskVO,
new ArrayList<StoragePool>(), volume.getSize(),
rootDiskHyperType);
} else if (volume.getState() == Volume.State.Uploaded) {
vol = copyVolume(rootDiskPool
, volume, vm, rootDiskTmplt, dcVO,
pod, diskVO, svo, rootDiskHyperType);
if (vol != null) {
// Moving of Volume is successful, decrement the volume resource count from secondary for an account and increment it into primary storage under same account.
_resourceLimitMgr.decrementResourceCount(volume.getAccountId(),
ResourceType.secondary_storage, new Long(volume.getSize()));
_resourceLimitMgr.incrementResourceCount(volume.getAccountId(),
ResourceType.primary_storage, new Long(volume.getSize()));
}
}
VolumeVO volVO = _volsDao.findById(vol.getId());
volVO.setFormat(getSupportedImageFormatForCluster(rootDiskHyperType));
_volsDao.update(volVO.getId(), volVO);
return volFactory.getVolume(volVO.getId());
}
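    /*
     * Decides whether the data volume has to be copied to a storage pool that the
     * root volume's pool can reach before the attach can proceed. A zone-wide data
     * store never requires a move; for cluster- and host-scoped stores a move is
     * only needed when the two pools are not in the same scope, and mixing cluster
     * and host scope is tolerated when the host belongs to that cluster.
     */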
private boolean needMoveVolume(VolumeVO rootVolumeOfVm, VolumeInfo volume) {
if (rootVolumeOfVm.getPoolId() == null || volume.getPoolId() == null) {
return false;
}
DataStore storeForRootVol = dataStoreMgr.getPrimaryDataStore(rootVolumeOfVm.getPoolId());
DataStore storeForDataVol = dataStoreMgr.getPrimaryDataStore(volume.getPoolId());
Scope storeForRootStoreScope = storeForRootVol.getScope();
if (storeForRootStoreScope == null) {
throw new CloudRuntimeException("Can't get scope of data store: " + storeForRootVol.getId());
}
Scope storeForDataStoreScope = storeForDataVol.getScope();
if (storeForDataStoreScope == null) {
throw new CloudRuntimeException("Can't get scope of data store: " + storeForDataVol.getId());
}
if (storeForDataStoreScope.getScopeType() == ScopeType.ZONE) {
return false;
}
if (storeForRootStoreScope.getScopeType() != storeForDataStoreScope.getScopeType()) {
if (storeForDataStoreScope.getScopeType() == ScopeType.CLUSTER && storeForRootStoreScope.getScopeType() == ScopeType.HOST) {
HostScope hs = (HostScope)storeForRootStoreScope;
if (storeForDataStoreScope.getScopeId().equals(hs.getClusterId())) {
return false;
}
}
if (storeForRootStoreScope.getScopeType() == ScopeType.CLUSTER && storeForDataStoreScope.getScopeType() == ScopeType.HOST) {
HostScope hs = (HostScope)storeForDataStoreScope;
if (storeForRootStoreScope.getScopeId().equals(hs.getClusterId())) {
return false;
}
}
throw new CloudRuntimeException("Can't move volume between scope: " + storeForDataStoreScope.getScopeType() + " and " + storeForRootStoreScope.getScopeType());
}
return !storeForRootStoreScope.isSameScope(storeForDataStoreScope);
}
private VolumeVO sendAttachVolumeCommand(UserVmVO vm, VolumeVO volumeToAttach, Long deviceId) {
String errorMsg = "Failed to attach volume: " + volumeToAttach.getName()
+ " to VM: " + vm.getHostName();
boolean sendCommand = (vm.getState() == State.Running);
AttachAnswer answer = null;
Long hostId = vm.getHostId();
if (hostId == null) {
hostId = vm.getLastHostId();
HostVO host = _hostDao.findById(hostId);
if (host != null
&& host.getHypervisorType() == HypervisorType.VMware) {
sendCommand = true;
}
}
StoragePoolVO volumeToAttachStoragePool = null;
if (sendCommand) {
volumeToAttachStoragePool = _storagePoolDao.findById(volumeToAttach.getPoolId());
long storagePoolId = volumeToAttachStoragePool.getId();
DataTO volTO = volFactory.getVolume(volumeToAttach.getId()).getTO();
DiskTO disk = new DiskTO(volTO, deviceId, null, volumeToAttach.getVolumeType());
AttachCommand cmd = new AttachCommand(disk, vm.getInstanceName());
cmd.setManaged(volumeToAttachStoragePool.isManaged());
cmd.setStorageHost(volumeToAttachStoragePool.getHostAddress());
cmd.setStoragePort(volumeToAttachStoragePool.getPort());
cmd.set_iScsiName(volumeToAttach.get_iScsiName());
VolumeInfo volumeInfo = volFactory.getVolume(volumeToAttach.getId());
DataStore dataStore = dataStoreMgr.getDataStore(storagePoolId, DataStoreRole.Primary);
ChapInfo chapInfo = volService.getChapInfo(volumeInfo, dataStore);
if (chapInfo != null) {
cmd.setChapInitiatorUsername(chapInfo.getInitiatorUsername());
cmd.setChapInitiatorPassword(chapInfo.getInitiatorSecret());
cmd.setChapTargetUsername(chapInfo.getTargetUsername());
cmd.setChapTargetPassword(chapInfo.getTargetSecret());
}
try {
answer = (AttachAnswer)_agentMgr.send(hostId, cmd);
} catch (Exception e) {
throw new CloudRuntimeException(errorMsg + " due to: "
+ e.getMessage());
}
}
if (!sendCommand || (answer != null && answer.getResult())) {
// Mark the volume as attached
if (sendCommand) {
DiskTO disk = answer.getDisk();
_volsDao.attachVolume(volumeToAttach.getId(), vm.getId(),
disk.getDiskSeq());
volumeToAttach = _volsDao.findById(volumeToAttach.getId());
if (volumeToAttachStoragePool.isManaged() &&
volumeToAttach.getPath() == null) {
volumeToAttach.setPath(answer.getDisk().getVdiUuid());
_volsDao.update(volumeToAttach.getId(), volumeToAttach);
}
} else {
_volsDao.attachVolume(volumeToAttach.getId(), vm.getId(), deviceId);
}
// insert record for disk I/O statistics
VmDiskStatisticsVO diskstats = _vmDiskStatsDao.findBy(vm.getAccountId(), vm.getDataCenterId(),vm.getId(), volumeToAttach.getId());
if (diskstats == null) {
diskstats = new VmDiskStatisticsVO(vm.getAccountId(), vm.getDataCenterId(),vm.getId(), volumeToAttach.getId());
_vmDiskStatsDao.persist(diskstats);
}
return _volsDao.findById(volumeToAttach.getId());
} else {
if (answer != null) {
String details = answer.getDetails();
if (details != null && !details.isEmpty()) {
errorMsg += "; " + details;
}
}
throw new CloudRuntimeException(errorMsg);
}
}
private int getMaxDataVolumesSupported(UserVmVO vm) {
Long hostId = vm.getHostId();
if (hostId == null) {
hostId = vm.getLastHostId();
}
HostVO host = _hostDao.findById(hostId);
Integer maxDataVolumesSupported = null;
if (host != null) {
_hostDao.loadDetails(host);
maxDataVolumesSupported = _hypervisorCapabilitiesDao
.getMaxDataVolumesLimit(host.getHypervisorType(),
host.getDetail("product_version"));
}
if (maxDataVolumesSupported == null) {
maxDataVolumesSupported = 6; // 6 data disks by default if nothing
// is specified in
// 'hypervisor_capabilities' table
}
return maxDataVolumesSupported.intValue();
}
@Override
@ActionEvent(eventType = EventTypes.EVENT_VOLUME_ATTACH, eventDescription = "attaching volume", async = true)
public Volume attachVolumeToVM(AttachVolumeCmd command) {
Long vmId = command.getVirtualMachineId();
Long volumeId = command.getId();
Long deviceId = command.getDeviceId();
Account caller = UserContext.current().getCaller();
// Check that the volume ID is valid
VolumeInfo volume = volFactory.getVolume(volumeId);
// Check that the volume is a data volume
if (volume == null || volume.getVolumeType() != Volume.Type.DATADISK) {
throw new InvalidParameterValueException(
"Please specify a valid data volume.");
}
// Check that the volume is not currently attached to any VM
if (volume.getInstanceId() != null) {
throw new InvalidParameterValueException(
"Please specify a volume that is not attached to any VM.");
}
// Check that the volume is not destroyed
if (volume.getState() == Volume.State.Destroy) {
throw new InvalidParameterValueException(
"Please specify a volume that is not destroyed.");
}
// Check that the virtual machine ID is valid and it's a user vm
UserVmVO vm = _userVmDao.findById(vmId);
if (vm == null || vm.getType() != VirtualMachine.Type.User) {
throw new InvalidParameterValueException(
"Please specify a valid User VM.");
}
// Check that the VM is in the correct state
if (vm.getState() != State.Running && vm.getState() != State.Stopped) {
throw new InvalidParameterValueException(
"Please specify a VM that is either running or stopped.");
}
// Check that the device ID is valid
if (deviceId != null) {
if (deviceId.longValue() == 0) {
throw new InvalidParameterValueException(
"deviceId can't be 0, which is used by Root device");
}
}
// Check that the number of data volumes attached to VM is less than
// that supported by hypervisor
List<VolumeVO> existingDataVolumes = _volsDao.findByInstanceAndType(
vmId, Volume.Type.DATADISK);
int maxDataVolumesSupported = getMaxDataVolumesSupported(vm);
if (existingDataVolumes.size() >= maxDataVolumesSupported) {
throw new InvalidParameterValueException(
"The specified VM already has the maximum number of data disks ("
+ maxDataVolumesSupported
+ "). Please specify another VM.");
}
// Check that the VM and the volume are in the same zone
if (vm.getDataCenterId() != volume.getDataCenterId()) {
throw new InvalidParameterValueException(
"Please specify a VM that is in the same zone as the volume.");
}
// If local storage is disabled then attaching a volume with local disk
// offering not allowed
DataCenterVO dataCenter = _dcDao.findById(volume.getDataCenterId());
if (!dataCenter.isLocalStorageEnabled()) {
DiskOfferingVO diskOffering = _diskOfferingDao.findById(volume
.getDiskOfferingId());
if (diskOffering.getUseLocalStorage()) {
throw new InvalidParameterValueException(
"Zone is not configured to use local storage but volume's disk offering "
+ diskOffering.getName() + " uses it");
}
}
// if target VM has associated VM snapshots
List<VMSnapshotVO> vmSnapshots = _vmSnapshotDao.findByVm(vmId);
if(vmSnapshots.size() > 0){
throw new InvalidParameterValueException(
"Unable to attach volume, please specify a VM that does not have VM snapshots");
}
// permission check
_accountMgr.checkAccess(caller, null, true, volume, vm);
if (!(Volume.State.Allocated.equals(volume.getState())
|| Volume.State.Ready.equals(volume.getState()) || Volume.State.Uploaded
.equals(volume.getState()))) {
throw new InvalidParameterValueException(
"Volume state must be in Allocated, Ready or in Uploaded state");
}
VolumeVO rootVolumeOfVm = null;
List<VolumeVO> rootVolumesOfVm = _volsDao.findByInstanceAndType(vmId,
Volume.Type.ROOT);
if (rootVolumesOfVm.size() != 1) {
throw new CloudRuntimeException(
"The VM "
+ vm.getHostName()
+ " has more than one ROOT volume and is in an invalid state.");
} else {
rootVolumeOfVm = rootVolumesOfVm.get(0);
}
HypervisorType rootDiskHyperType = vm.getHypervisorType();
HypervisorType dataDiskHyperType = _volsDao.getHypervisorType(volume
.getId());
if (dataDiskHyperType != HypervisorType.None
&& rootDiskHyperType != dataDiskHyperType) {
throw new InvalidParameterValueException(
"Can't attach a volume created by: " + dataDiskHyperType
+ " to a " + rootDiskHyperType + " vm");
}
deviceId = getDeviceId(vmId, deviceId);
VolumeInfo volumeOnPrimaryStorage = volume;
if (volume.getState().equals(Volume.State.Allocated)
|| volume.getState() == Volume.State.Uploaded) {
try {
volumeOnPrimaryStorage = createVolumeOnPrimaryStorage(vm, rootVolumeOfVm, volume, rootDiskHyperType);
} catch (NoTransitionException e) {
s_logger.debug("Failed to create volume on primary storage", e);
throw new CloudRuntimeException("Failed to create volume on primary storage", e);
}
}
// reload the volume from db
volumeOnPrimaryStorage = volFactory.getVolume(volumeOnPrimaryStorage.getId());
boolean moveVolumeNeeded = needMoveVolume(rootVolumeOfVm, volumeOnPrimaryStorage);
if (moveVolumeNeeded) {
PrimaryDataStoreInfo primaryStore = (PrimaryDataStoreInfo)volumeOnPrimaryStorage.getDataStore();
if (primaryStore.isLocal()) {
throw new CloudRuntimeException(
"Failed to attach local data volume "
+ volume.getName()
+ " to VM "
+ vm.getDisplayName()
+ " as migration of local data volume is not allowed");
}
StoragePoolVO vmRootVolumePool = _storagePoolDao
.findById(rootVolumeOfVm.getPoolId());
try {
volumeOnPrimaryStorage = moveVolume(volumeOnPrimaryStorage,
vmRootVolumePool.getDataCenterId(),
vmRootVolumePool.getPodId(),
vmRootVolumePool.getClusterId(),
dataDiskHyperType);
} catch (ConcurrentOperationException e) {
s_logger.debug("move volume failed", e);
throw new CloudRuntimeException("move volume failed", e);
}
}
AsyncJobExecutor asyncExecutor = BaseAsyncJobExecutor
.getCurrentExecutor();
if (asyncExecutor != null) {
AsyncJobVO job = asyncExecutor.getJob();
if (s_logger.isInfoEnabled()) {
s_logger.info("Trying to attaching volume " + volumeId
+ " to vm instance:" + vm.getId()
+ ", update async job-" + job.getId() + " = [ " + job.getUuid()
+ " ] progress status");
}
_asyncMgr.updateAsyncJobAttachment(job.getId(), "volume", volumeId);
_asyncMgr.updateAsyncJobStatus(job.getId(),
BaseCmd.PROGRESS_INSTANCE_CREATED, volumeId);
}
VolumeVO newVol = _volumeDao.findById(volumeOnPrimaryStorage.getId());
newVol = sendAttachVolumeCommand(vm, newVol, deviceId);
return newVol;
}
@Override
public Volume updateVolume(UpdateVolumeCmd cmd){
Long volumeId = cmd.getId();
String path = cmd.getPath();
if(path == null){
throw new InvalidParameterValueException("Failed to update the volume as path was null");
}
VolumeVO volume = ApiDBUtils.findVolumeById(volumeId);
volume.setPath(path);
_volumeDao.update(volumeId, volume);
return volume;
}
@Override
@ActionEvent(eventType = EventTypes.EVENT_VOLUME_DETACH, eventDescription = "detaching volume", async = true)
public Volume detachVolumeFromVM(DetachVolumeCmd cmmd) {
Account caller = UserContext.current().getCaller();
if ((cmmd.getId() == null && cmmd.getDeviceId() == null && cmmd
.getVirtualMachineId() == null)
|| (cmmd.getId() != null && (cmmd.getDeviceId() != null || cmmd
.getVirtualMachineId() != null))
|| (cmmd.getId() == null && (cmmd.getDeviceId() == null || cmmd
.getVirtualMachineId() == null))) {
throw new InvalidParameterValueException(
"Please provide either a volume id, or a tuple(device id, instance id)");
}
Long volumeId = cmmd.getId();
VolumeVO volume = null;
if (volumeId != null) {
volume = _volsDao.findById(volumeId);
} else {
volume = _volsDao.findByInstanceAndDeviceId(
cmmd.getVirtualMachineId(), cmmd.getDeviceId()).get(0);
}
Long vmId = null;
if (cmmd.getVirtualMachineId() == null) {
vmId = volume.getInstanceId();
} else {
vmId = cmmd.getVirtualMachineId();
}
// Check that the volume ID is valid
if (volume == null) {
throw new InvalidParameterValueException(
"Unable to find volume with ID: " + volumeId);
}
// Permissions check
_accountMgr.checkAccess(caller, null, true, volume);
// Check that the volume is a data volume
if (volume.getVolumeType() != Volume.Type.DATADISK) {
throw new InvalidParameterValueException(
"Please specify a data volume.");
}
// Check that the volume is currently attached to a VM
if (vmId == null) {
throw new InvalidParameterValueException(
"The specified volume is not attached to a VM.");
}
// Check that the VM is in the correct state
UserVmVO vm = _userVmDao.findById(vmId);
if (vm.getState() != State.Running && vm.getState() != State.Stopped
&& vm.getState() != State.Destroyed) {
throw new InvalidParameterValueException(
"Please specify a VM that is either running or stopped.");
}
// Check if the VM has VM snapshots
List<VMSnapshotVO> vmSnapshots = _vmSnapshotDao.findByVm(vmId);
if(vmSnapshots.size() > 0){
throw new InvalidParameterValueException(
"Unable to detach volume, the specified volume is attached to a VM that has VM snapshots.");
}
AsyncJobExecutor asyncExecutor = BaseAsyncJobExecutor
.getCurrentExecutor();
if (asyncExecutor != null) {
AsyncJobVO job = asyncExecutor.getJob();
if (s_logger.isInfoEnabled()) {
s_logger.info("Trying to attaching volume " + volumeId
+ "to vm instance:" + vm.getId()
+ ", update async job-" + job.getId() + " = [ " + job.getUuid()
+ " ] progress status");
}
_asyncMgr.updateAsyncJobAttachment(job.getId(), "volume", volumeId);
_asyncMgr.updateAsyncJobStatus(job.getId(),
BaseCmd.PROGRESS_INSTANCE_CREATED, volumeId);
}
String errorMsg = "Failed to detach volume: " + volume.getName()
+ " from VM: " + vm.getHostName();
boolean sendCommand = (vm.getState() == State.Running);
Answer answer = null;
if (sendCommand) {
StoragePoolVO volumePool = _storagePoolDao.findById(volume.getPoolId());
DataTO volTO = volFactory.getVolume(volume.getId()).getTO();
DiskTO disk = new DiskTO(volTO, volume.getDeviceId(), null, volume.getVolumeType());
DettachCommand cmd = new DettachCommand(disk, vm.getInstanceName());
cmd.setManaged(volumePool.isManaged());
cmd.setStorageHost(volumePool.getHostAddress());
cmd.setStoragePort(volumePool.getPort());
cmd.set_iScsiName(volume.get_iScsiName());
try {
answer = _agentMgr.send(vm.getHostId(), cmd);
} catch (Exception e) {
throw new CloudRuntimeException(errorMsg + " due to: "
+ e.getMessage());
}
}
if (!sendCommand || (answer != null && answer.getResult())) {
// Mark the volume as detached
_volsDao.detachVolume(volume.getId());
return _volsDao.findById(volumeId);
} else {
if (answer != null) {
String details = answer.getDetails();
if (details != null && !details.isEmpty()) {
errorMsg += "; " + details;
}
}
throw new CloudRuntimeException(errorMsg);
}
}
@DB
protected VolumeVO switchVolume(VolumeVO existingVolume,
VirtualMachineProfile<? extends VirtualMachine> vm)
throws StorageUnavailableException {
Transaction txn = Transaction.currentTxn();
Long templateIdToUse = null;
Long volTemplateId = existingVolume.getTemplateId();
long vmTemplateId = vm.getTemplateId();
if (volTemplateId != null && volTemplateId.longValue() != vmTemplateId) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("switchVolume: Old Volume's templateId: "
+ volTemplateId
+ " does not match the VM's templateId: "
+ vmTemplateId
+ ", updating templateId in the new Volume");
}
templateIdToUse = vmTemplateId;
}
txn.start();
VolumeVO newVolume = allocateDuplicateVolume(existingVolume,
templateIdToUse);
// In case of Vmware if vm reference is not removed then during root
// disk cleanup
// the vm also gets deleted, so remove the reference
if (vm.getHypervisorType() == HypervisorType.VMware) {
_volsDao.detachVolume(existingVolume.getId());
}
try {
stateTransitTo(existingVolume, Volume.Event.DestroyRequested);
} catch (NoTransitionException e) {
s_logger.debug("Unable to destroy existing volume: " + e.toString());
}
txn.commit();
return newVolume;
}
@Override
public void release(VirtualMachineProfile<? extends VMInstanceVO> profile) {
// add code here
}
@Override
@DB
public void cleanupVolumes(long vmId) throws ConcurrentOperationException {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Cleaning storage for vm: " + vmId);
}
List<VolumeVO> volumesForVm = _volsDao.findByInstance(vmId);
List<VolumeVO> toBeExpunged = new ArrayList<VolumeVO>();
Transaction txn = Transaction.currentTxn();
txn.start();
for (VolumeVO vol : volumesForVm) {
if (vol.getVolumeType().equals(Type.ROOT)) {
// Destroy volume if not already destroyed
boolean volumeAlreadyDestroyed = (vol.getState() == Volume.State.Destroy ||
vol.getState() == Volume.State.Expunged ||
vol.getState() == Volume.State.Expunging);
if (!volumeAlreadyDestroyed) {
volService.destroyVolume(vol.getId());
} else {
s_logger.debug("Skipping destroy for the volume " + vol + " as its in state " + vol.getState().toString());
}
toBeExpunged.add(vol);
} else {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Detaching " + vol);
}
_volsDao.detachVolume(vol.getId());
}
}
txn.commit();
AsyncCallFuture<VolumeApiResult> future = null;
for (VolumeVO expunge : toBeExpunged) {
future = volService.expungeVolumeAsync(volFactory.getVolume(expunge.getId()));
try {
future.get();
            } catch (InterruptedException e) {
                s_logger.debug("failed to expunge volume " + expunge.getId(), e);
            } catch (ExecutionException e) {
                s_logger.debug("failed to expunge volume " + expunge.getId(), e);
            }
}
}
@DB
@Override
public Volume migrateVolume(MigrateVolumeCmd cmd) {
Long volumeId = cmd.getVolumeId();
Long storagePoolId = cmd.getStoragePoolId();
VolumeVO vol = _volsDao.findById(volumeId);
if (vol == null) {
throw new InvalidParameterValueException(
"Failed to find the volume id: " + volumeId);
}
if (vol.getState() != Volume.State.Ready) {
throw new InvalidParameterValueException(
"Volume must be in ready state");
}
if (storagePoolId == vol.getPoolId()) {
throw new InvalidParameterValueException("Specified destination pool and the current volume storage pool are same");
}
boolean liveMigrateVolume = false;
Long instanceId = vol.getInstanceId();
VMInstanceVO vm = null;
if (instanceId != null) {
vm = _vmInstanceDao.findById(instanceId);
}
if (vm != null && vm.getState() == State.Running) {
// Check if the underlying hypervisor supports storage motion.
Long hostId = vm.getHostId();
if (hostId != null) {
HostVO host = _hostDao.findById(hostId);
HypervisorCapabilitiesVO capabilities = null;
if (host != null) {
capabilities = _hypervisorCapabilitiesDao.findByHypervisorTypeAndVersion(host.getHypervisorType(),
host.getHypervisorVersion());
}
if (capabilities != null) {
liveMigrateVolume = capabilities.isStorageMotionSupported();
}
}
}
if (liveMigrateVolume && !cmd.isLiveMigrate()) {
throw new InvalidParameterValueException("The volume " + vol + "is attached to a vm and for migrating it " +
"the parameter livemigrate should be specified");
}
StoragePool destPool = (StoragePool)dataStoreMgr.getDataStore(storagePoolId, DataStoreRole.Primary);
if (destPool == null) {
throw new InvalidParameterValueException(
"Failed to find the destination storage pool: "
+ storagePoolId);
}
if (!volumeOnSharedStoragePool(vol)) {
throw new InvalidParameterValueException(
"Migration of volume from local storage pool is not supported");
}
Volume newVol = null;
if (liveMigrateVolume) {
newVol = liveMigrateVolume(vol, destPool);
} else {
newVol = migrateVolume(vol, destPool);
}
return newVol;
}
@DB
protected Volume migrateVolume(Volume volume, StoragePool destPool) {
VolumeInfo vol = volFactory.getVolume(volume.getId());
AsyncCallFuture<VolumeApiResult> future = volService.copyVolume(vol, (DataStore)destPool);
try {
VolumeApiResult result = future.get();
if (result.isFailed()) {
s_logger.error("migrate volume failed:" + result.getResult());
return null;
}
return result.getVolume();
} catch (InterruptedException e) {
s_logger.debug("migrate volume failed", e);
return null;
} catch (ExecutionException e) {
s_logger.debug("migrate volume failed", e);
return null;
}
}
@DB
protected Volume liveMigrateVolume(Volume volume, StoragePool destPool) {
VolumeInfo vol = volFactory.getVolume(volume.getId());
AsyncCallFuture<VolumeApiResult> future = volService.migrateVolume(vol, (DataStore)destPool);
try {
VolumeApiResult result = future.get();
if (result.isFailed()) {
s_logger.debug("migrate volume failed:" + result.getResult());
return null;
}
return result.getVolume();
} catch (InterruptedException e) {
s_logger.debug("migrate volume failed", e);
return null;
} catch (ExecutionException e) {
s_logger.debug("migrate volume failed", e);
return null;
}
}
@Override
public <T extends VMInstanceVO> void migrateVolumes(T vm, VirtualMachineTO vmTo, Host srcHost, Host destHost,
Map<VolumeVO, StoragePoolVO> volumeToPool) {
// Check if all the vms being migrated belong to the vm.
// Check if the storage pool is of the right type.
// Create a VolumeInfo to DataStore map too.
Map<VolumeInfo, DataStore> volumeMap = new HashMap<VolumeInfo, DataStore>();
for (Map.Entry<VolumeVO, StoragePoolVO> entry : volumeToPool.entrySet()) {
VolumeVO volume = entry.getKey();
StoragePoolVO storagePool = entry.getValue();
StoragePool destPool = (StoragePool)dataStoreMgr.getDataStore(storagePool.getId(),
DataStoreRole.Primary);
if (volume.getInstanceId() != vm.getId()) {
throw new CloudRuntimeException("Volume " + volume + " that has to be migrated doesn't belong to the" +
" instance " + vm);
}
if (destPool == null) {
throw new CloudRuntimeException("Failed to find the destination storage pool " + storagePool.getId());
}
volumeMap.put(volFactory.getVolume(volume.getId()), (DataStore)destPool);
}
AsyncCallFuture<CommandResult> future = volService.migrateVolumes(volumeMap, vmTo, srcHost, destHost);
try {
CommandResult result = future.get();
if (result.isFailed()) {
s_logger.debug("Failed to migrated vm " + vm + " along with its volumes. " + result.getResult());
throw new CloudRuntimeException("Failed to migrated vm " + vm + " along with its volumes. " +
result.getResult());
}
} catch (InterruptedException e) {
s_logger.debug("Failed to migrated vm " + vm + " along with its volumes.", e);
} catch (ExecutionException e) {
s_logger.debug("Failed to migrated vm " + vm + " along with its volumes.", e);
}
}
@Override
public boolean storageMigration(
VirtualMachineProfile<? extends VirtualMachine> vm,
StoragePool destPool) {
List<VolumeVO> vols = _volsDao.findUsableVolumesForInstance(vm.getId());
List<Volume> volumesNeedToMigrate = new ArrayList<Volume>();
for (VolumeVO volume : vols) {
if (volume.getState() != Volume.State.Ready) {
s_logger.debug("volume: " + volume.getId() + " is in "
+ volume.getState() + " state");
throw new CloudRuntimeException("volume: " + volume.getId()
+ " is in " + volume.getState() + " state");
}
if (volume.getPoolId() == destPool.getId()) {
s_logger.debug("volume: " + volume.getId()
+ " is on the same storage pool: " + destPool.getId());
continue;
}
volumesNeedToMigrate.add(volume);
}
if (volumesNeedToMigrate.isEmpty()) {
s_logger.debug("No volume need to be migrated");
return true;
}
for (Volume vol : volumesNeedToMigrate) {
Volume result = migrateVolume(vol, destPool);
if (result == null) {
return false;
}
}
return true;
}
@Override
public void prepareForMigration(
VirtualMachineProfile<? extends VirtualMachine> vm,
DeployDestination dest) {
List<VolumeVO> vols = _volsDao.findUsableVolumesForInstance(vm.getId());
if (s_logger.isDebugEnabled()) {
s_logger.debug("Preparing " + vols.size() + " volumes for " + vm);
}
for (VolumeVO vol : vols) {
DataTO volTO = volFactory.getVolume(vol.getId()).getTO();
DiskTO disk = new DiskTO(volTO, vol.getDeviceId(), null, vol.getVolumeType());
vm.addDisk(disk);
}
if (vm.getType() == VirtualMachine.Type.User && vm.getTemplate().getFormat() == ImageFormat.ISO) {
DataTO dataTO = tmplFactory.getTemplate(vm.getTemplate().getId(), DataStoreRole.Image, vm.getVirtualMachine().getDataCenterId()).getTO();
DiskTO iso = new DiskTO(dataTO, 3L, null, Volume.Type.ISO);
vm.addDisk(iso);
}
}
private static enum VolumeTaskType {
RECREATE,
NOP,
MIGRATE
}
private static class VolumeTask {
final VolumeTaskType type;
final StoragePoolVO pool;
final VolumeVO volume;
VolumeTask(VolumeTaskType type, VolumeVO volume, StoragePoolVO pool) {
this.type = type;
this.pool = pool;
this.volume = volume;
}
}
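    /*
     * Classifies each volume of the VM into a task: RECREATE for volumes that are
     * still Allocated/Creating or that are marked recreatable, MIGRATE when the
     * deployment planner assigned a different shared pool than the one the volume
     * currently lives on, and NOP when the volume can stay where it is.
     */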
private List<VolumeTask> getTasks(List<VolumeVO> vols, Map<Volume, StoragePool> destVols) throws StorageUnavailableException {
boolean recreate = _recreateSystemVmEnabled;
List<VolumeTask> tasks = new ArrayList<VolumeTask>();
for (VolumeVO vol : vols) {
StoragePoolVO assignedPool = null;
if (destVols != null) {
StoragePool pool = destVols.get(vol);
if (pool != null) {
assignedPool = _storagePoolDao.findById(pool.getId());
}
}
if (assignedPool == null && recreate) {
assignedPool = _storagePoolDao.findById(vol.getPoolId());
}
if (assignedPool != null || recreate) {
Volume.State state = vol.getState();
if (state == Volume.State.Allocated
|| state == Volume.State.Creating) {
VolumeTask task = new VolumeTask(VolumeTaskType.RECREATE, vol, null);
tasks.add(task);
} else {
if (vol.isRecreatable()) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Volume " + vol
+ " will be recreated on storage pool "
+ assignedPool
+ " assigned by deploymentPlanner");
}
VolumeTask task = new VolumeTask(VolumeTaskType.RECREATE, vol, null);
tasks.add(task);
} else {
if (assignedPool.getId() != vol.getPoolId()) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Mismatch in storage pool "
+ assignedPool
+ " assigned by deploymentPlanner and the one associated with volume "
+ vol);
}
DiskOfferingVO diskOffering = _diskOfferingDao
.findById(vol.getDiskOfferingId());
if (diskOffering.getUseLocalStorage()) {
// Currently migration of local volume is not supported so bail out
if (s_logger.isDebugEnabled()) {
s_logger.debug("Local volume "
+ vol
+ " cannot be recreated on storagepool "
+ assignedPool
+ " assigned by deploymentPlanner");
}
throw new CloudRuntimeException("Local volume " + vol + " cannot be recreated on storagepool " + assignedPool + " assigned by deploymentPlanner");
} else {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Shared volume "
+ vol
+ " will be migrated on storage pool "
+ assignedPool
+ " assigned by deploymentPlanner");
}
VolumeTask task = new VolumeTask(VolumeTaskType.MIGRATE, vol, assignedPool);
tasks.add(task);
}
} else {
StoragePoolVO pool = _storagePoolDao
.findById(vol.getPoolId());
VolumeTask task = new VolumeTask(VolumeTaskType.NOP, vol, pool);
tasks.add(task);
}
}
}
} else {
if (vol.getPoolId() == null) {
throw new StorageUnavailableException(
"Volume has no pool associate and also no storage pool assigned in DeployDestination, Unable to create "
+ vol, Volume.class, vol.getId());
}
if (s_logger.isDebugEnabled()) {
s_logger.debug("No need to recreate the volume: " + vol
+ ", since it already has a pool assigned: "
+ vol.getPoolId() + ", adding disk to VM");
}
StoragePoolVO pool = _storagePoolDao.findById(vol
.getPoolId());
VolumeTask task = new VolumeTask(VolumeTaskType.NOP, vol, pool);
tasks.add(task);
}
}
return tasks;
}
private Pair<VolumeVO, DataStore> recreateVolume(VolumeVO vol, VirtualMachineProfile<? extends VirtualMachine> vm,
DeployDestination dest) throws StorageUnavailableException {
VolumeVO newVol;
boolean recreate = _recreateSystemVmEnabled;
DataStore destPool = null;
if (recreate
&& (dest.getStorageForDisks() == null || dest
.getStorageForDisks().get(vol) == null)) {
destPool = dataStoreMgr.getDataStore(vol.getPoolId(), DataStoreRole.Primary);
s_logger.debug("existing pool: " + destPool.getId());
} else {
StoragePool pool = dest.getStorageForDisks().get(vol);
destPool = dataStoreMgr.getDataStore(pool.getId(), DataStoreRole.Primary);
}
if (vol.getState() == Volume.State.Allocated
|| vol.getState() == Volume.State.Creating) {
newVol = vol;
} else {
newVol = switchVolume(vol, vm);
// update the volume->PrimaryDataStoreVO map since volumeId has
// changed
if (dest.getStorageForDisks() != null
&& dest.getStorageForDisks().containsKey(vol)) {
StoragePool poolWithOldVol = dest
.getStorageForDisks().get(vol);
dest.getStorageForDisks().put(newVol, poolWithOldVol);
dest.getStorageForDisks().remove(vol);
}
if (s_logger.isDebugEnabled()) {
s_logger.debug("Created new volume " + newVol
+ " for old volume " + vol);
}
}
VolumeInfo volume = volFactory.getVolume(newVol.getId(), destPool);
Long templateId = newVol.getTemplateId();
AsyncCallFuture<VolumeApiResult> future = null;
if (templateId == null) {
future = volService.createVolumeAsync(volume, destPool);
} else {
TemplateInfo templ = tmplFactory.getTemplate(templateId, DataStoreRole.Image);
future = volService.createVolumeFromTemplateAsync(volume, destPool.getId(), templ);
}
VolumeApiResult result = null;
try {
result = future.get();
if (result.isFailed()) {
s_logger.debug("Unable to create "
+ newVol + ":" + result.getResult());
throw new StorageUnavailableException("Unable to create "
+ newVol + ":" + result.getResult(), destPool.getId());
}
newVol = _volsDao.findById(newVol.getId());
} catch (InterruptedException e) {
s_logger.error("Unable to create " + newVol, e);
throw new StorageUnavailableException("Unable to create "
+ newVol + ":" + e.toString(), destPool.getId());
} catch (ExecutionException e) {
s_logger.error("Unable to create " + newVol, e);
throw new StorageUnavailableException("Unable to create "
+ newVol + ":" + e.toString(), destPool.getId());
}
return new Pair<VolumeVO, DataStore>(newVol, destPool);
}
@Override
public void prepare(VirtualMachineProfile<? extends VirtualMachine> vm,
DeployDestination dest) throws StorageUnavailableException,
InsufficientStorageCapacityException, ConcurrentOperationException {
if (dest == null) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("DeployDestination cannot be null, cannot prepare Volumes for the vm: "
+ vm);
}
throw new CloudRuntimeException(
"Unable to prepare Volume for vm because DeployDestination is null, vm:"
+ vm);
}
List<VolumeVO> vols = _volsDao.findUsableVolumesForInstance(vm.getId());
if (s_logger.isDebugEnabled()) {
s_logger.debug("Checking if we need to prepare " + vols.size()
+ " volumes for " + vm);
}
List<VolumeTask> tasks = getTasks(vols, dest.getStorageForDisks());
Volume vol = null;
StoragePool pool = null;
for (VolumeTask task : tasks) {
if (task.type == VolumeTaskType.NOP) {
pool = (StoragePool)dataStoreMgr.getDataStore(task.pool.getId(), DataStoreRole.Primary);
vol = task.volume;
} else if (task.type == VolumeTaskType.MIGRATE) {
pool = (StoragePool)dataStoreMgr.getDataStore(task.pool.getId(), DataStoreRole.Primary);
migrateVolume(task.volume, pool);
vol = task.volume;
} else if (task.type == VolumeTaskType.RECREATE) {
Pair<VolumeVO, DataStore> result = recreateVolume(task.volume, vm, dest);
pool = (StoragePool)dataStoreMgr.getDataStore(result.second().getId(), DataStoreRole.Primary);
vol = result.first();
}
DataTO volumeTO = volFactory.getVolume(vol.getId()).getTO();
DiskTO disk = new DiskTO(volumeTO, vol.getDeviceId(), null, vol.getVolumeType());
vm.addDisk(disk);
}
}
private Long getDeviceId(long vmId, Long deviceId) {
        // allocate a device id; 0 is reserved for the ROOT disk and 3 for the ISO/CD-ROM device (see prepareForMigration)
List<VolumeVO> vols = _volsDao.findByInstance(vmId);
if (deviceId != null) {
if (deviceId.longValue() > 15 || deviceId.longValue() == 0
|| deviceId.longValue() == 3) {
throw new RuntimeException("deviceId should be 1,2,4-15");
}
for (VolumeVO vol : vols) {
if (vol.getDeviceId().equals(deviceId)) {
throw new RuntimeException("deviceId " + deviceId
+ " is used by vm" + vmId);
}
}
} else {
// allocate deviceId here
List<String> devIds = new ArrayList<String>();
for (int i = 1; i < 15; i++) {
devIds.add(String.valueOf(i));
}
devIds.remove("3");
for (VolumeVO vol : vols) {
devIds.remove(vol.getDeviceId().toString().trim());
}
deviceId = Long.parseLong(devIds.iterator().next());
}
return deviceId;
}
private boolean stateTransitTo(Volume vol, Volume.Event event)
throws NoTransitionException {
return _volStateMachine.transitTo(vol, event, null, _volsDao);
}
@Override
public boolean canVmRestartOnAnotherServer(long vmId) {
List<VolumeVO> vols = _volsDao.findCreatedByInstance(vmId);
for (VolumeVO vol : vols) {
if (!vol.isRecreatable() && !vol.getPoolType().isShared()) {
return false;
}
}
return true;
}
@Override
public boolean configure(String name, Map<String, Object> params)
throws ConfigurationException {
String _customDiskOfferingMinSizeStr = _configDao
.getValue(Config.CustomDiskOfferingMinSize.toString());
_customDiskOfferingMinSize = NumbersUtil.parseInt(
_customDiskOfferingMinSizeStr, Integer
.parseInt(Config.CustomDiskOfferingMinSize
.getDefaultValue()));
String maxVolumeSizeInGbString = _configDao
.getValue("storage.max.volume.size");
_maxVolumeSizeInGb = NumbersUtil.parseLong(maxVolumeSizeInGbString,
2000);
String value = _configDao.getValue(Config.RecreateSystemVmEnabled.key());
_recreateSystemVmEnabled = Boolean.parseBoolean(value);
        String copyVolumeWaitStr = _configDao.getValue(Config.CopyVolumeWait.toString());
        _copyvolumewait = NumbersUtil.parseInt(copyVolumeWaitStr,
                Integer.parseInt(Config.CopyVolumeWait.getDefaultValue()));
return true;
}
@Override
public boolean start() {
return true;
}
@Override
public boolean stop() {
return true;
}
@Override
public String getName() {
return "Volume Manager";
}
@Override
public void destroyVolume(VolumeVO volume) {
try {
volService.destroyVolume(volume.getId());
} catch (ConcurrentOperationException e) {
s_logger.debug("Failed to destroy volume" + volume.getId(), e);
throw new CloudRuntimeException("Failed to destroy volume" + volume.getId(), e);
}
}
@Override
public Snapshot takeSnapshot(Long volumeId, Long policyId, Long snapshotId, Account account) throws ResourceAllocationException {
VolumeInfo volume = volFactory.getVolume(volumeId);
if (volume == null) {
throw new InvalidParameterValueException("Creating snapshot failed due to volume:" + volumeId + " doesn't exist");
}
if (volume.getState() != Volume.State.Ready) {
throw new InvalidParameterValueException("VolumeId: " + volumeId + " is not in " + Volume.State.Ready + " state but " + volume.getState() + ". Cannot take snapshot.");
}
CreateSnapshotPayload payload = new CreateSnapshotPayload();
payload.setSnapshotId(snapshotId);
payload.setSnapshotPolicyId(policyId);
payload.setAccount(account);
volume.addPayload(payload);
return volService.takeSnapshot(volume);
}
@Override
public Snapshot allocSnapshot(Long volumeId, Long policyId) throws ResourceAllocationException {
Account caller = UserContext.current().getCaller();
VolumeInfo volume = volFactory.getVolume(volumeId);
if (volume == null) {
throw new InvalidParameterValueException("Creating snapshot failed due to volume:" + volumeId + " doesn't exist");
}
DataCenter zone = _dcDao.findById(volume.getDataCenterId());
if (zone == null) {
throw new InvalidParameterValueException("Can't find zone by id " + volume.getDataCenterId());
}
if (Grouping.AllocationState.Disabled == zone.getAllocationState() && !_accountMgr.isRootAdmin(caller.getType())) {
throw new PermissionDeniedException("Cannot perform this operation, Zone is currently disabled: " + zone.getName());
}
if (volume.getState() != Volume.State.Ready) {
throw new InvalidParameterValueException("VolumeId: " + volumeId + " is not in " + Volume.State.Ready + " state but " + volume.getState() + ". Cannot take snapshot.");
}
if ( volume.getTemplateId() != null ) {
VMTemplateVO template = _templateDao.findById(volume.getTemplateId());
if( template != null && template.getTemplateType() == Storage.TemplateType.SYSTEM ) {
throw new InvalidParameterValueException("VolumeId: " + volumeId + " is for System VM , Creating snapshot against System VM volumes is not supported");
}
}
StoragePool storagePool = (StoragePool)volume.getDataStore();
if (storagePool == null) {
throw new InvalidParameterValueException("VolumeId: " + volumeId + " please attach this volume to a VM before create snapshot for it");
}
return snapshotMgr.allocSnapshot(volumeId, policyId);
}
@Override
@ActionEvent(eventType = EventTypes.EVENT_VOLUME_EXTRACT, eventDescription = "extracting volume", async = true)
public String extractVolume(ExtractVolumeCmd cmd) {
Long volumeId = cmd.getId();
Long zoneId = cmd.getZoneId();
String mode = cmd.getMode();
Account account = UserContext.current().getCaller();
if (!_accountMgr.isRootAdmin(account.getType()) && ApiDBUtils.isExtractionDisabled()) {
throw new PermissionDeniedException("Extraction has been disabled by admin");
}
VolumeVO volume = _volumeDao.findById(volumeId);
if (volume == null) {
InvalidParameterValueException ex = new InvalidParameterValueException("Unable to find volume with specified volumeId");
ex.addProxyObject(volumeId.toString(), "volumeId");
throw ex;
}
// perform permission check
_accountMgr.checkAccess(account, null, true, volume);
if (_dcDao.findById(zoneId) == null) {
throw new InvalidParameterValueException("Please specify a valid zone.");
}
if (volume.getPoolId() == null) {
throw new InvalidParameterValueException("The volume doesnt belong to a storage pool so cant extract it");
}
// Extract activity only for detached volumes or for volumes whose
// instance is stopped
if (volume.getInstanceId() != null && ApiDBUtils.findVMInstanceById(volume.getInstanceId()).getState() != State.Stopped) {
s_logger.debug("Invalid state of the volume with ID: " + volumeId
+ ". It should be either detached or the VM should be in stopped state.");
PermissionDeniedException ex = new PermissionDeniedException(
"Invalid state of the volume with specified ID. It should be either detached or the VM should be in stopped state.");
ex.addProxyObject(volume.getUuid(), "volumeId");
throw ex;
}
if (volume.getVolumeType() != Volume.Type.DATADISK) {
            // Data disks don't have any template dependency.
VMTemplateVO template = ApiDBUtils.findTemplateById(volume.getTemplateId());
if (template != null) { // For ISO based volumes template = null and
// we allow extraction of all ISO based
// volumes
boolean isExtractable = template.isExtractable() && template.getTemplateType() != Storage.TemplateType.SYSTEM;
if (!isExtractable && account != null && account.getType() != Account.ACCOUNT_TYPE_ADMIN) {
// Global admins are always allowed to extract
PermissionDeniedException ex = new PermissionDeniedException("The volume with specified volumeId is not allowed to be extracted");
ex.addProxyObject(volume.getUuid(), "volumeId");
throw ex;
}
}
}
Upload.Mode extractMode;
if (mode == null || (!mode.equals(Upload.Mode.FTP_UPLOAD.toString()) && !mode.equals(Upload.Mode.HTTP_DOWNLOAD.toString()))) {
throw new InvalidParameterValueException("Please specify a valid extract Mode ");
} else {
extractMode = mode.equals(Upload.Mode.FTP_UPLOAD.toString()) ? Upload.Mode.FTP_UPLOAD : Upload.Mode.HTTP_DOWNLOAD;
}
// Check if the url already exists
VolumeDataStoreVO volumeStoreRef = _volumeStoreDao.findByVolume(volumeId);
if(volumeStoreRef != null && volumeStoreRef.getExtractUrl() != null){
return volumeStoreRef.getExtractUrl();
}
        // Clean up code to remove the previous uploadVO and uploadMonitor code. The previous code tries to fake an async
        // operation purely in the db table with uploadVO and async_job entries, but the internal implementation is actually synchronous.
ImageStoreEntity secStore = (ImageStoreEntity) dataStoreMgr.getImageStore(zoneId);
// Copy volume from primary to secondary storage
VolumeInfo srcVol = volFactory.getVolume(volume.getId());
AsyncCallFuture<VolumeApiResult> cvAnswer = volService.copyVolume(srcVol, secStore);
// Check if you got a valid answer.
VolumeApiResult cvResult = null;
try {
cvResult = cvAnswer.get();
} catch (InterruptedException e1) {
s_logger.debug("failed copy volume", e1);
throw new CloudRuntimeException("Failed to copy volume", e1);
} catch (ExecutionException e1) {
s_logger.debug("failed copy volume", e1);
throw new CloudRuntimeException("Failed to copy volume", e1);
}
if (cvResult == null || cvResult.isFailed()) {
String errorString = "Failed to copy the volume from the source primary storage pool to secondary storage.";
throw new CloudRuntimeException(errorString);
}
VolumeInfo vol = cvResult.getVolume();
String extractUrl = secStore.createEntityExtractUrl(vol.getPath(), vol.getFormat(), vol);
volumeStoreRef = _volumeStoreDao.findByVolume(volumeId);
volumeStoreRef.setExtractUrl(extractUrl);
volumeStoreRef.setExtractUrlCreated(DateUtil.now());
_volumeStoreDao.update(volumeStoreRef.getId(), volumeStoreRef);
return extractUrl;
}
private String getFormatForPool(StoragePool pool) {
ClusterVO cluster = ApiDBUtils.findClusterById(pool.getClusterId());
if (cluster.getHypervisorType() == HypervisorType.XenServer) {
return "vhd";
} else if (cluster.getHypervisorType() == HypervisorType.KVM) {
return "qcow2";
} else if (cluster.getHypervisorType() == HypervisorType.VMware) {
return "ova";
} else if (cluster.getHypervisorType() == HypervisorType.Ovm) {
return "raw";
} else {
return null;
}
}
@Override
public String getVmNameFromVolumeId(long volumeId) {
VolumeVO volume = _volsDao.findById(volumeId);
return getVmNameOnVolume(volume);
}
@Override
public String getStoragePoolOfVolume(long volumeId) {
VolumeVO vol = _volsDao.findById(volumeId);
return dataStoreMgr.getPrimaryDataStore(vol.getPoolId()).getUuid();
}
}
| CLOUDSTACK-4244. Unable to attach a volume to a VM deployed in Stopped (startvm=false) state.
While attaching a volume to a VM, create the volume on the hypervisor only if the VM has been started on it.
| server/src/com/cloud/storage/VolumeManagerImpl.java | CLOUDSTACK-4244. Unable to attach a volume to a VM deployed in Stopped (startvm=false) state. While attaching a volume to a VM, create the volume on the hypervisor only if the VM has been started on it.
<ide>
<ide> deviceId = getDeviceId(vmId, deviceId);
<ide> VolumeInfo volumeOnPrimaryStorage = volume;
<del> if (volume.getState().equals(Volume.State.Allocated)
<del> || volume.getState() == Volume.State.Uploaded) {
<del> try {
<del> volumeOnPrimaryStorage = createVolumeOnPrimaryStorage(vm, rootVolumeOfVm, volume, rootDiskHyperType);
<del> } catch (NoTransitionException e) {
<del> s_logger.debug("Failed to create volume on primary storage", e);
<del> throw new CloudRuntimeException("Failed to create volume on primary storage", e);
<del> }
<del> }
<del>
<del> // reload the volume from db
<del> volumeOnPrimaryStorage = volFactory.getVolume(volumeOnPrimaryStorage.getId());
<del> boolean moveVolumeNeeded = needMoveVolume(rootVolumeOfVm, volumeOnPrimaryStorage);
<del>
<del> if (moveVolumeNeeded) {
<del> PrimaryDataStoreInfo primaryStore = (PrimaryDataStoreInfo)volumeOnPrimaryStorage.getDataStore();
<del> if (primaryStore.isLocal()) {
<del> throw new CloudRuntimeException(
<del> "Failed to attach local data volume "
<del> + volume.getName()
<del> + " to VM "
<del> + vm.getDisplayName()
<del> + " as migration of local data volume is not allowed");
<del> }
<del> StoragePoolVO vmRootVolumePool = _storagePoolDao
<del> .findById(rootVolumeOfVm.getPoolId());
<del>
<del> try {
<del> volumeOnPrimaryStorage = moveVolume(volumeOnPrimaryStorage,
<del> vmRootVolumePool.getDataCenterId(),
<del> vmRootVolumePool.getPodId(),
<del> vmRootVolumePool.getClusterId(),
<del> dataDiskHyperType);
<del> } catch (ConcurrentOperationException e) {
<del> s_logger.debug("move volume failed", e);
<del> throw new CloudRuntimeException("move volume failed", e);
<del> }
<del> }
<del>
<add>
<add> // Check if volume is stored on secondary storage
<add> boolean isVolumeOnSec = false;
<add> VolumeInfo volOnSecondary = volFactory.getVolume(volume.getId(), DataStoreRole.Image);
<add> if (volOnSecondary != null) {
<add> isVolumeOnSec = true;
<add> if(volOnSecondary.getState() != Volume.State.Uploaded) {
<add> throw new InvalidParameterValueException("Volume is not uploaded yet. Please try this operation once the volume is uploaded");
<add> }
<add> }
<add>
<add> boolean createVolumeOnBackend = true;
<add> if (rootVolumeOfVm.getState() == Volume.State.Allocated) {
<add> createVolumeOnBackend = false;
<add> if(isVolumeOnSec) {
<add> throw new CloudRuntimeException("Cant attach uploaded volume to the vm which is not created. Please start it and then retry");
<add> }
<add> }
<add>
<add> // Create volume on the backend only when VM's root volume is allocated
<add> if (createVolumeOnBackend) {
<add> if (volume.getState().equals(Volume.State.Allocated)
<add> || volume.getState() == Volume.State.Uploaded) {
<add> try {
<add> volumeOnPrimaryStorage = createVolumeOnPrimaryStorage(vm, rootVolumeOfVm, volume, rootDiskHyperType);
<add> } catch (NoTransitionException e) {
<add> s_logger.debug("Failed to create volume on primary storage", e);
<add> throw new CloudRuntimeException("Failed to create volume on primary storage", e);
<add> }
<add> }
<add>
<add> // reload the volume from db
<add> volumeOnPrimaryStorage = volFactory.getVolume(volumeOnPrimaryStorage.getId());
<add> boolean moveVolumeNeeded = needMoveVolume(rootVolumeOfVm, volumeOnPrimaryStorage);
<add>
<add> if (moveVolumeNeeded) {
<add> PrimaryDataStoreInfo primaryStore = (PrimaryDataStoreInfo)volumeOnPrimaryStorage.getDataStore();
<add> if (primaryStore.isLocal()) {
<add> throw new CloudRuntimeException(
<add> "Failed to attach local data volume "
<add> + volume.getName()
<add> + " to VM "
<add> + vm.getDisplayName()
<add> + " as migration of local data volume is not allowed");
<add> }
<add> StoragePoolVO vmRootVolumePool = _storagePoolDao
<add> .findById(rootVolumeOfVm.getPoolId());
<add>
<add> try {
<add> volumeOnPrimaryStorage = moveVolume(volumeOnPrimaryStorage,
<add> vmRootVolumePool.getDataCenterId(),
<add> vmRootVolumePool.getPodId(),
<add> vmRootVolumePool.getClusterId(),
<add> dataDiskHyperType);
<add> } catch (ConcurrentOperationException e) {
<add> s_logger.debug("move volume failed", e);
<add> throw new CloudRuntimeException("move volume failed", e);
<add> }
<add> }
<add> }
<ide>
<ide> AsyncJobExecutor asyncExecutor = BaseAsyncJobExecutor
<ide> .getCurrentExecutor(); |
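
The diff above creates the data volume on the backend only when the VM's root volume has left the Allocated state, and rejects attaching an uploaded (secondary-storage) volume to a VM that has never been started. A minimal, self-contained sketch of that gating rule (hypothetical names, not the actual CloudStack API):

// Sketch of the gating rule from the commit message above; VolumeState and
// AttachPlan are illustrative stand-ins, not CloudStack types.
public class AttachVolumeSketch {

    enum VolumeState { ALLOCATED, UPLOADED, READY }

    static final class AttachPlan {
        final boolean createOnBackend;
        AttachPlan(boolean createOnBackend) { this.createOnBackend = createOnBackend; }
    }

    // Create the data volume on the hypervisor only if the VM's root volume has
    // already been created there (i.e. the VM was started at least once).
    static AttachPlan planAttach(VolumeState rootVolumeState, VolumeState dataVolumeState) {
        boolean vmStartedOnHypervisor = rootVolumeState != VolumeState.ALLOCATED;
        if (!vmStartedOnHypervisor && dataVolumeState == VolumeState.UPLOADED) {
            throw new IllegalStateException(
                "Cannot attach an uploaded volume to a VM that was never started; start the VM first");
        }
        return new AttachPlan(vmStartedOnHypervisor);
    }

    public static void main(String[] args) {
        System.out.println(planAttach(VolumeState.ALLOCATED, VolumeState.ALLOCATED).createOnBackend); // false
        System.out.println(planAttach(VolumeState.READY, VolumeState.UPLOADED).createOnBackend);      // true
    }
}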
|
Java | apache-2.0 | 69f47a5e13492162e057f366083b33f97d788e23 | 0 | bessovistnyj/jvm-byte-code,bessovistnyj/jvm-byte-code | package ru.napadovskiuB.ArraySet;
import java.util.Arrays;
import java.util.Iterator;
import java.util.NoSuchElementException;
/**
* Package of CollectionPro testTask.
*
* @author Napadovskiy Bohdan
* @version 1.0
* @param <T> generic.
* @since 10.07.2017
*/
public class SimpleArraySet<T> implements Iterable<T> {
/**
*Main array.
*/
private Object[] mainArray;
/**
* Default size array.
*/
private final int defaultSize = 2;
/**
* array size.
*/
private int size;
/**
* index too add element.
*/
private int indexForWrite;
/**
* Constructor with default size of array.
*/
public SimpleArraySet() {
this.mainArray = new Object[defaultSize];
this.indexForWrite = 0;
}
/**
* Constructor with user size.
* @param size array size.
*/
public SimpleArraySet(int size) {
this.mainArray = new Object[size];
this.indexForWrite = 0;
}
/**
*Method resize array.
*/
private void resizeArray() {
int newCapacity = (this.mainArray.length * 2);
this.mainArray = Arrays.copyOf(this.mainArray, newCapacity);
}
/**
* Method check element in array.
* @param t element for find.
* @return result.
*/
public boolean checkElement(T t) {
boolean result = true;
for (int i = 0; i < this.mainArray.length; i++) {
if (this.mainArray[i] != null) {
if (this.mainArray[i].equals(t)) {
result = false;
break;
}
}
}
return result;
}
/**
*Method add element to array.
* @param t element too ad.
*/
public void add(T t) {
if (this.indexForWrite == (this.mainArray.length)) {
resizeArray();
}
if (checkElement(t)) {
this.mainArray[this.indexForWrite] = t;
this.indexForWrite++;
this.size++;
}
}
/**
* Method return size array.
* @return result.
*/
public int getSize() {
return this.size;
}
/**
*Method return element by index.
* @param index for search.
* @return element.
*/
public T getElement(int index) {
return (T) this.mainArray[index];
}
/**
* Class iterator.
* @return iterator.
*/
@Override
public Iterator<T> iterator() {
return new Iterator<T>() {
/**
* current index.
*/
private int currentIndex;
/**
* Method check has next element.
* @return result.
*/
@Override
public boolean hasNext() {
return currentIndex < mainArray.length;
}
/**
* Method return next element.
* @return result.
* @throws NoSuchElementException
*/
@Override
public T next() throws NoSuchElementException {
if (hasNext()) {
int current = currentIndex;
this.currentIndex++;
return (T) mainArray[current];
}
throw new NoSuchElementException();
}
};
}
}
| CollectionPro/Set/src/main/java/ru/napadovskiuB/ArraySet/SimpleArraySet.java | package ru.napadovskiuB.ArraySet;
import java.util.Arrays;
import java.util.Iterator;
import java.util.NoSuchElementException;
/**
* Package of CollectionPro testTask.
*
* @author Napadovskiy Bohdan
* @version 1.0
* @param <T> generic.
* @since 10.07.2017
*/
public class SimpleArraySet<T> implements Iterable<T> {
/**
*Main array.
*/
private Object[] mainArray;
/**
* Default size array.
*/
private final int defaultSize = 2;
/**
* array size.
*/
private int size;
/**
* index too add element.
*/
private int indexForWrite;
/**
* Constructor with default size of array.
*/
public SimpleArraySet() {
this.mainArray = new Object[defaultSize];
this.indexForWrite = 0;
}
/**
* Constructor with user size.
* @param size array size.
*/
public SimpleArraySet(int size) {
this.mainArray = new Object[size];
this.indexForWrite = 0;
}
/**
*Method resize array.
*/
private void resizeArray() {
int newCapacity = (this.mainArray.length * 2);
this.mainArray = Arrays.copyOf(this.mainArray, newCapacity);
}
/**
* Method check element in array.
* @param t element for find.
* @return result.
*/
private boolean checkElement(T t) {
boolean result = true;
for (int i = 0; i < this.mainArray.length; i++) {
if (this.mainArray[i] != null) {
if (this.mainArray[i].equals(t)) {
result = false;
break;
}
}
}
return result;
}
/**
*Method add element to array.
* @param t element too ad.
*/
public void add(T t) {
if (this.indexForWrite == (this.mainArray.length)) {
resizeArray();
}
if (checkElement(t)) {
this.mainArray[this.indexForWrite] = t;
this.indexForWrite++;
this.size++;
}
}
/**
* Method return size array.
* @return result.
*/
public int getSize() {
return this.size;
}
/**
* Class iterator.
* @return iterator.
*/
@Override
public Iterator<T> iterator() {
return new Iterator<T>() {
/**
* current index.
*/
private int currentIndex;
/**
* Method check has next element.
* @return result.
*/
@Override
public boolean hasNext() {
return currentIndex < mainArray.length;
}
/**
* Method return next element.
* @return result.
* @throws NoSuchElementException
*/
@Override
public T next() throws NoSuchElementException {
if (hasNext()) {
int current = currentIndex;
this.currentIndex++;
return (T) mainArray[current];
}
throw new NoSuchElementException();
}
};
}
}
 | add task 1. Implement a Set collection backed by an array
 | CollectionPro/Set/src/main/java/ru/napadovskiuB/ArraySet/SimpleArraySet.java | add task 1. Implement a Set collection backed by an array | <ide><path>ollectionPro/Set/src/main/java/ru/napadovskiuB/ArraySet/SimpleArraySet.java
<ide> public SimpleArraySet(int size) {
<ide> this.mainArray = new Object[size];
<ide> this.indexForWrite = 0;
<del>
<ide> }
<ide>
<ide>
<ide> private void resizeArray() {
<ide> int newCapacity = (this.mainArray.length * 2);
<ide> this.mainArray = Arrays.copyOf(this.mainArray, newCapacity);
<del>
<ide> }
<ide>
<ide> /**
<ide> * @param t element for find.
<ide> * @return result.
<ide> */
<del> private boolean checkElement(T t) {
<add> public boolean checkElement(T t) {
<ide> boolean result = true;
<ide> for (int i = 0; i < this.mainArray.length; i++) {
<ide> if (this.mainArray[i] != null) {
<ide> */
<ide> public int getSize() {
<ide> return this.size;
<add> }
<add>
<add>
<add> /**
<add> *Method return element by index.
<add> * @param index for search.
<add> * @return element.
<add> */
<add> public T getElement(int index) {
<add> return (T) this.mainArray[index];
<ide> }
<ide>
<ide> |
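
The commit implements a fixed-growth, array-backed set: add() consults checkElement to skip duplicates and doubles the backing array when it runs out of room, and this revision also makes checkElement public and adds a getElement(index) accessor. A short usage sketch, assuming the class above is on the classpath:

import ru.napadovskiuB.ArraySet.SimpleArraySet;

public class SimpleArraySetDemo {
    public static void main(String[] args) {
        SimpleArraySet<String> set = new SimpleArraySet<>();
        set.add("a");
        set.add("b");
        set.add("a");                       // duplicate, rejected by checkElement
        System.out.println(set.getSize());  // 2
        System.out.println(set.getElement(0)); // "a" (accessor added in this commit)
        for (String s : set) {              // note: the iterator also walks unused
            System.out.println(s);          // slots, returning null for them
        }
    }
}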
|
Java | apache-2.0 | error: pathspec 'src/com/mainmethod/trailmix1/sqlite/model/Placemark.java' did not match any file(s) known to git
| c5d4230262f1fde0ecd51e33d8d5f122054e7602 | 1 | MainMethod1/TrailMix-for-peel-android,MainMethod1/TrailMix-for-peel-android | package com.mainmethod.trailmix1.sqlite.model;
public class Placemark {
int id;
int trail_id;
public Placemark(int id, int trail_id) {
// TODO Auto-generated constructor stub
this.id = id;
this.trail_id = trail_id;
}
public Placemark() {
// TODO Auto-generated constructor stub
}
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public int getTrail_id() {
return trail_id;
}
public void setTrail_id(int trail_id) {
this.trail_id = trail_id;
}
}
| src/com/mainmethod/trailmix1/sqlite/model/Placemark.java | Added placemark model class
| src/com/mainmethod/trailmix1/sqlite/model/Placemark.java | Added placemark model class | <ide><path>rc/com/mainmethod/trailmix1/sqlite/model/Placemark.java
<add>package com.mainmethod.trailmix1.sqlite.model;
<add>
<add>public class Placemark {
<add>
<add> int id;
<add> int trail_id;
<add>
<add> public Placemark(int id, int trail_id) {
<add> // TODO Auto-generated constructor stub
<add> this.id = id;
<add> this.trail_id = trail_id;
<add> }
<add>
<add> public Placemark() {
<add> // TODO Auto-generated constructor stub
<add> }
<add>
<add> public int getId() {
<add> return id;
<add> }
<add>
<add> public void setId(int id) {
<add> this.id = id;
<add> }
<add>
<add> public int getTrail_id() {
<add> return trail_id;
<add> }
<add>
<add> public void setTrail_id(int trail_id) {
<add> this.trail_id = trail_id;
<add> }
<add>
<add>} |
|
Java | epl-1.0 | 84cf2b6328a7fb13b55b8e35b519b3d33b36c393 | 0 | theanuradha/debrief,alastrina123/debrief,pecko/debrief,theanuradha/debrief,pecko/debrief,pecko/debrief,alastrina123/debrief,debrief/debrief,debrief/debrief,pecko/debrief,alastrina123/debrief,pecko/debrief,debrief/debrief,theanuradha/debrief,pecko/debrief,debrief/debrief,theanuradha/debrief,theanuradha/debrief,alastrina123/debrief,pecko/debrief,debrief/debrief,alastrina123/debrief,theanuradha/debrief,alastrina123/debrief,theanuradha/debrief,debrief/debrief,alastrina123/debrief | /**
*
*/
package MWC.TacticalData;
import java.beans.IntrospectionException;
import java.beans.PropertyDescriptor;
import java.io.Serializable;
import java.util.Date;
import MWC.GUI.Editable;
import MWC.GUI.ExcludeFromRightClickEdit;
import MWC.GUI.FireExtended;
import MWC.GUI.FireReformatted;
import MWC.GUI.Plottable;
import MWC.GenericData.HiResDate;
import MWC.Utilities.TextFormatting.DebriefFormatDateTime;
public final class NarrativeEntry implements MWC.GUI.Plottable, Serializable,
ExcludeFromRightClickEdit
{
public static final String DTG = "DTG";
// ///////////////////////////////////////////
// member variables
// ///////////////////////////////////////////
private String _track;
private HiResDate _DTG;
private String _entry;
private String _type;
String _DTGString = null;
private transient NarrativeEntryInfo _myInfo;
/**
*
*/
private static final long serialVersionUID = 1L;
// ///////////////////////////////////////////
// constructor
// ///////////////////////////////////////////
/**
* new constructor - for narrative entries which include the type of entry
* (typically for SMNT narratives)
*
* @param track
* name of the track this applies to
* @param type
* what sort of entry this is (or null)
* @param DTG
* when the entry was recorded
* @param entry
* the content of the entry
*/
public NarrativeEntry(final String track, final String type,
final HiResDate DTG, final String entry)
{
_track = track;
_DTG = DTG;
_entry = entry;
_type = type;
}
/**
* old constructor - for when narratives didn't include the type attribute
*
* @param track
* name of the track this applies to
* @param DTG
* when the entry was recorded
* @param entry
* the content of the entry
*/
public NarrativeEntry(final String track, final HiResDate DTG,
final String entry)
{
this(track, null, DTG, entry);
}
// ///////////////////////////////////////////
// accessor methods
// ///////////////////////////////////////////
public final String getTrackName()
{
return _track;
}
public final String getSource()
{
return _track;
}
@FireReformatted
public final void setSource(String track)
{
_track = track;
}
public final String getEntry()
{
return _entry;
}
@FireReformatted
public void setEntry(String val)
{
_entry = val;
}
public final HiResDate getDTG()
{
return _DTG;
}
@FireExtended
public void setDTG(HiResDate date)
{
_DTG = date;
}
public final String getType()
{
return _type;
}
@FireReformatted
public void setType(String type)
{
_type = type;
}
public final String getDTGString()
{
if (_DTGString == null)
_DTGString = DebriefFormatDateTime.toStringHiRes(_DTG);
return _DTGString;
}
/**
* member function to meet requirements of comparable interface *
*/
public final int compareTo(final Plottable o)
{
final NarrativeEntry other = (NarrativeEntry) o;
int result = _DTG.compareTo(other._DTG);
if (result == 0)
result = 1;
return result;
}
// ///////////////////////////////////////////
// member methods to meet requirements of Plottable interface
// ///////////////////////////////////////////
/**
* paint this object to the specified canvas
*/
public final void paint(final MWC.GUI.CanvasType dest)
{
}
/**
* find the data area occupied by this item
*/
public final MWC.GenericData.WorldArea getBounds()
{
return null;
}
/**
* it this item currently visible?
*/
public final boolean getVisible()
{
return true;
}
/**
* set the visibility (although we ignore this)
*/
public final void setVisible(final boolean val)
{
}
/**
* how far away are we from this point? or return null if it can't be
* calculated
*/
public final double rangeFrom(final MWC.GenericData.WorldLocation other)
{
return -1;
}
/**
* get the editor for this item
*
* @return the BeanInfo data for this editable object
*/
public final MWC.GUI.Editable.EditorType getInfo()
{
if (_myInfo == null)
_myInfo = new NarrativeEntryInfo(this, this.toString());
return _myInfo;
}
/**
* whether there is any edit information for this item this is a convenience
* function to save creating the EditorType data first
*
* @return yes/no
*/
public final boolean hasEditor()
{
return true;
}
/**
* get the name of this entry, using the formatted DTG
*/
public final String getName()
{
return DebriefFormatDateTime.toStringHiRes(_DTG);
}
public final String toString()
{
return getName();
}
public boolean equals(Object obj) {
if (obj == null)
return false;
if (obj == this)
return true;
if (!(obj instanceof NarrativeEntry))
return false;
return super.equals(obj);
}
// ////////////////////////////////////////////////////
// bean info for this class
// ///////////////////////////////////////////////////
public final class NarrativeEntryInfo extends Editable.EditorType
{
public NarrativeEntryInfo(final NarrativeEntry data, final String theName)
{
super(data, theName, data.toString());
}
public final PropertyDescriptor[] getPropertyDescriptors()
{
try
{
final PropertyDescriptor[] myRes =
{
prop("Type", "the type of entry", FORMAT),
prop("Source", "the source for this entry", FORMAT),
prop(DTG, "the time this entry was recorded", FORMAT),
prop("Entry", "the content of this entry", FORMAT), };
return myRes;
}
catch (IntrospectionException e)
{
e.printStackTrace();
return super.getPropertyDescriptors();
}
}
}
// ////////////////////////////////////////////////////////////////////////////////////////////////
// testing for this class
// ////////////////////////////////////////////////////////////////////////////////////////////////
static public final class testMe extends junit.framework.TestCase
{
static public final String TEST_ALL_TEST_TYPE = "UNIT";
public testMe(final String val)
{
super(val);
}
public final void testMyParams()
{
HiResDate hd = new HiResDate(new Date());
final NarrativeEntry ne = new NarrativeEntry("aaa", "bbb", hd, "vvvv");
editableTesterSupport.testParams(ne, this);
}
}
} | trunk/org.mwc.cmap.legacy/src/MWC/TacticalData/NarrativeEntry.java | /**
*
*/
package MWC.TacticalData;
import java.beans.IntrospectionException;
import java.beans.PropertyDescriptor;
import java.io.Serializable;
import java.util.Date;
import MWC.GUI.Editable;
import MWC.GUI.ExcludeFromRightClickEdit;
import MWC.GUI.FireExtended;
import MWC.GUI.FireReformatted;
import MWC.GUI.Plottable;
import MWC.GenericData.HiResDate;
import MWC.Utilities.TextFormatting.DebriefFormatDateTime;
public final class NarrativeEntry implements MWC.GUI.Plottable, Serializable,
ExcludeFromRightClickEdit
{
public static final String DTG = "DTG";
// ///////////////////////////////////////////
// member variables
// ///////////////////////////////////////////
private String _track;
private HiResDate _DTG;
private String _entry;
private String _type;
String _DTGString = null;
private transient NarrativeEntryInfo _myInfo;
/**
*
*/
private static final long serialVersionUID = 1L;
// ///////////////////////////////////////////
// constructor
// ///////////////////////////////////////////
/**
* new constructor - for narrative entries which include the type of entry
* (typically for SMNT narratives)
*
* @param track
* name of the track this applies to
* @param type
* what sort of entry this is (or null)
* @param DTG
* when the entry was recorded
* @param entry
* the content of the entry
*/
public NarrativeEntry(final String track, final String type,
final HiResDate DTG, final String entry)
{
_track = track;
_DTG = DTG;
_entry = entry;
_type = type;
}
/**
* old constructor - for when narratives didn't include the type attribute
*
* @param track
* name of the track this applies to
* @param DTG
* when the entry was recorded
* @param entry
* the content of the entry
*/
public NarrativeEntry(final String track, final HiResDate DTG,
final String entry)
{
this(track, null, DTG, entry);
}
// ///////////////////////////////////////////
// accessor methods
// ///////////////////////////////////////////
public final String getTrackName()
{
return _track;
}
public final String getSource()
{
return _track;
}
@FireReformatted
public final void setSource(String track)
{
_track = track;
}
public final String getEntry()
{
return _entry;
}
@FireReformatted
public void setEntry(String val)
{
_entry = val;
}
public final HiResDate getDTG()
{
return _DTG;
}
@FireExtended
public void setDTG(HiResDate date)
{
_DTG = date;
}
public final String getType()
{
return _type;
}
@FireReformatted
public void setType(String type)
{
_type = type;
}
public final String getDTGString()
{
if (_DTGString == null)
_DTGString = DebriefFormatDateTime.toStringHiRes(_DTG);
return _DTGString;
}
/**
* member function to meet requirements of comparable interface *
*/
public final int compareTo(final Plottable o)
{
final NarrativeEntry other = (NarrativeEntry) o;
return _DTG.compareTo(other._DTG);
}
// ///////////////////////////////////////////
// member methods to meet requirements of Plottable interface
// ///////////////////////////////////////////
/**
* paint this object to the specified canvas
*/
public final void paint(final MWC.GUI.CanvasType dest)
{
}
/**
* find the data area occupied by this item
*/
public final MWC.GenericData.WorldArea getBounds()
{
return null;
}
/**
* it this item currently visible?
*/
public final boolean getVisible()
{
return true;
}
/**
* set the visibility (although we ignore this)
*/
public final void setVisible(final boolean val)
{
}
/**
* how far away are we from this point? or return null if it can't be
* calculated
*/
public final double rangeFrom(final MWC.GenericData.WorldLocation other)
{
return -1;
}
/**
* get the editor for this item
*
* @return the BeanInfo data for this editable object
*/
public final MWC.GUI.Editable.EditorType getInfo()
{
if (_myInfo == null)
_myInfo = new NarrativeEntryInfo(this, this.toString());
return _myInfo;
}
/**
* whether there is any edit information for this item this is a convenience
* function to save creating the EditorType data first
*
* @return yes/no
*/
public final boolean hasEditor()
{
return true;
}
/**
* get the name of this entry, using the formatted DTG
*/
public final String getName()
{
return DebriefFormatDateTime.toStringHiRes(_DTG);
}
public final String toString()
{
return getName();
}
// ////////////////////////////////////////////////////
// bean info for this class
// ///////////////////////////////////////////////////
public final class NarrativeEntryInfo extends Editable.EditorType
{
public NarrativeEntryInfo(final NarrativeEntry data, final String theName)
{
super(data, theName, data.toString());
}
public final PropertyDescriptor[] getPropertyDescriptors()
{
try
{
final PropertyDescriptor[] myRes =
{
prop("Type", "the type of entry", FORMAT),
prop("Source", "the source for this entry", FORMAT),
prop(DTG, "the time this entry was recorded", FORMAT),
prop("Entry", "the content of this entry", FORMAT), };
return myRes;
}
catch (IntrospectionException e)
{
e.printStackTrace();
return super.getPropertyDescriptors();
}
}
}
// ////////////////////////////////////////////////////////////////////////////////////////////////
// testing for this class
// ////////////////////////////////////////////////////////////////////////////////////////////////
static public final class testMe extends junit.framework.TestCase
{
static public final String TEST_ALL_TEST_TYPE = "UNIT";
public testMe(final String val)
{
super(val);
}
public final void testMyParams()
{
HiResDate hd = new HiResDate(new Date());
final NarrativeEntry ne = new NarrativeEntry("aaa", "bbb", hd, "vvvv");
editableTesterSupport.testParams(ne, this);
}
}
} | Fixed the comparison: allow adding 2 narrative entries with the same time
 | trunk/org.mwc.cmap.legacy/src/MWC/TacticalData/NarrativeEntry.java | Fixed the comparison: allow adding 2 narrative entries with the same time | <ide><path>runk/org.mwc.cmap.legacy/src/MWC/TacticalData/NarrativeEntry.java
<ide> public final int compareTo(final Plottable o)
<ide> {
<ide> final NarrativeEntry other = (NarrativeEntry) o;
<del> return _DTG.compareTo(other._DTG);
<add> int result = _DTG.compareTo(other._DTG);
<add> if (result == 0)
<add> result = 1;
<add> return result;
<ide> }
<ide>
<ide> // ///////////////////////////////////////////
<ide> {
<ide> return getName();
<ide> }
<add>
<add> public boolean equals(Object obj) {
<add> if (obj == null)
<add> return false;
<add> if (obj == this)
<add> return true;
<add> if (!(obj instanceof NarrativeEntry))
<add> return false;
<add> return super.equals(obj);
<add> }
<add>
<ide>
<ide> // ////////////////////////////////////////////////////
<ide> // bean info for this class
<ide> editableTesterSupport.testParams(ne, this);
<ide> }
<ide> }
<del>
<add>
<add>
<ide> } |
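
The patch above makes compareTo never return 0 (and adds an identity-based equals), because sorted containers treat a zero comparison as "already present" and would silently drop a second narrative entry recorded at the same instant. A small self-contained illustration of that effect, using plain java.util types rather than the Debrief classes:

import java.util.Comparator;
import java.util.TreeSet;

public class SameTimeEntriesDemo {

    static final class Entry {
        final long dtg;
        final String text;
        Entry(long dtg, String text) { this.dtg = dtg; this.text = text; }
    }

    public static void main(String[] args) {
        // Comparing by time alone: two entries with the same DTG compare as 0,
        // so a sorted set keeps only one of them.
        TreeSet<Entry> byTimeOnly = new TreeSet<>(Comparator.comparingLong((Entry e) -> e.dtg));
        byTimeOnly.add(new Entry(1000L, "first"));
        byTimeOnly.add(new Entry(1000L, "second"));
        System.out.println(byTimeOnly.size()); // 1

        // The patched ordering never reports 0, so both same-time entries survive.
        // Trade-off: the ordering is no longer consistent with equals, so lookups
        // or removals keyed on time alone may not find a given entry.
        Comparator<Entry> patched = (a, b) -> {
            int result = Long.compare(a.dtg, b.dtg);
            return result == 0 ? 1 : result;
        };
        TreeSet<Entry> keepBoth = new TreeSet<>(patched);
        keepBoth.add(new Entry(1000L, "first"));
        keepBoth.add(new Entry(1000L, "second"));
        System.out.println(keepBoth.size()); // 2
    }
}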
|
Java | mit | abf1b5e76a9589a34e5ac7f06732fbdd7dfae525 | 0 | elBukkit/MagicPlugin,elBukkit/MagicLib,elBukkit/MagicPlugin,elBukkit/MagicPlugin | package com.elmakers.mine.bukkit.action;
import com.elmakers.mine.bukkit.api.block.MaterialBrush;
import com.elmakers.mine.bukkit.api.effect.EffectPlay;
import com.elmakers.mine.bukkit.api.effect.EffectPlayer;
import com.elmakers.mine.bukkit.api.magic.Mage;
import com.elmakers.mine.bukkit.api.magic.MageController;
import com.elmakers.mine.bukkit.api.spell.MageSpell;
import com.elmakers.mine.bukkit.api.spell.Spell;
import com.elmakers.mine.bukkit.api.block.UndoList;
import com.elmakers.mine.bukkit.spell.BaseSpell;
import com.elmakers.mine.bukkit.spell.BlockSpell;
import com.elmakers.mine.bukkit.spell.BrushSpell;
import com.elmakers.mine.bukkit.spell.TargetingSpell;
import com.elmakers.mine.bukkit.spell.UndoableSpell;
import com.elmakers.mine.bukkit.utility.Target;
import org.bukkit.Bukkit;
import org.bukkit.Chunk;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.World;
import org.bukkit.block.Block;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.entity.Entity;
import org.bukkit.entity.LivingEntity;
import org.bukkit.entity.Player;
import org.bukkit.metadata.FixedMetadataValue;
import org.bukkit.plugin.Plugin;
import org.bukkit.util.Vector;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.UUID;
import java.util.WeakHashMap;
import java.util.logging.Logger;
public class CastContext implements com.elmakers.mine.bukkit.api.action.CastContext {
protected static Random random;
protected final static int TELEPORT_RETRY_COUNT = 8;
protected final static int TELEPORT_RETRY_INTERVAL = 10;
private final Location location;
private final Entity entity;
private Location targetLocation;
private Entity targetEntity;
private UndoList undoList;
private String targetName = null;
private Collection<Entity> targetedEntities = Collections.newSetFromMap(new WeakHashMap<Entity, Boolean>());
private Set<UUID> targetMessagesSent = new HashSet<UUID>();
private Collection<EffectPlay> currentEffects = new ArrayList<EffectPlay>();
private Spell spell;
private BaseSpell baseSpell;
private BlockSpell blockSpell;
private MageSpell mageSpell;
private BrushSpell brushSpell;
private TargetingSpell targetingSpell;
private UndoableSpell undoSpell;
private MaterialBrush brush;
private CastContext base;
// Base Context
private int workAllowed = 500;
private int actionsPerformed;
public static double WAND_LOCATION_OFFSET = 0.5;
protected class TeleportTask implements Runnable {
private final CastContext context;
private final Entity entity;
private final Location location;
private final int verticalSearchDistance;
protected TeleportTask(CastContext context, final Entity entity, final Location location, final int verticalSearchDistance) {
this.context = context;
this.entity = entity;
this.location = location;
this.verticalSearchDistance = verticalSearchDistance;
}
@Override
public void run() {
context.delayedTeleport(entity, location, verticalSearchDistance);
}
}
public CastContext(Spell spell) {
this.setSpell(spell);
this.location = null;
this.entity = null;
this.base = this;
targetedEntities = Collections.newSetFromMap(new WeakHashMap<Entity, Boolean>());
targetMessagesSent = new HashSet<UUID>();
currentEffects = new ArrayList<EffectPlay>();
}
public CastContext(com.elmakers.mine.bukkit.api.action.CastContext copy) {
this(copy, copy.getEntity(), copy.getLocation());
}
public CastContext(com.elmakers.mine.bukkit.api.action.CastContext copy, Entity sourceEntity) {
this(copy, sourceEntity, null);
}
public CastContext(com.elmakers.mine.bukkit.api.action.CastContext copy, Location sourceLocation) {
this(copy, null, sourceLocation);
}
public CastContext(com.elmakers.mine.bukkit.api.action.CastContext copy, Entity sourceEntity, Location sourceLocation) {
this.location = sourceLocation;
this.entity = sourceEntity;
this.setSpell(copy.getSpell());
this.targetEntity = copy.getTargetEntity();
this.targetLocation = copy.getTargetLocation();
this.targetedEntities = copy.getTargetedEntities();
this.undoList = copy.getUndoList();
this.targetName = copy.getTargetName();
this.brush = copy.getBrush();
this.targetMessagesSent = copy.getTargetMessagesSent();
this.currentEffects = copy.getCurrentEffects();
if (copy instanceof CastContext)
{
this.base = ((CastContext)copy).base;
}
else
{
this.base = this;
}
}
public void setSpell(Spell spell)
{
this.spell = spell;
if (spell instanceof BaseSpell)
{
this.baseSpell = (BaseSpell)spell;
}
if (spell instanceof MageSpell)
{
this.mageSpell = (MageSpell)spell;
}
if (spell instanceof UndoableSpell)
{
this.undoSpell = (UndoableSpell)spell;
undoList = this.undoSpell.getUndoList();
}
if (spell instanceof TargetingSpell)
{
this.targetingSpell = (TargetingSpell)spell;
}
if (spell instanceof BlockSpell)
{
this.blockSpell = (BlockSpell)spell;
}
if (spell instanceof BrushSpell)
{
this.brushSpell = (BrushSpell)spell;
}
}
@Override
public Location getWandLocation() {
Location wandLocation = getEyeLocation();
if (wandLocation == null) {
return null;
}
Location toTheRight = wandLocation.clone();
toTheRight.setYaw(toTheRight.getYaw() + 90);
Vector wandDirection = toTheRight.getDirection();
wandLocation = wandLocation.clone();
wandLocation.add(wandDirection.multiply(WAND_LOCATION_OFFSET));
return wandLocation;
}
@Override
public Location getEyeLocation() {
if (location != null) {
return location;
}
if (entity != null) {
if (entity instanceof LivingEntity) {
return ((LivingEntity) entity).getEyeLocation();
}
return entity.getLocation();
}
return spell.getEyeLocation();
}
@Override
public Entity getEntity() {
if (entity != null) {
return entity;
}
return spell.getEntity();
}
@Override
public LivingEntity getLivingEntity() {
Entity entity = getEntity();
return entity instanceof LivingEntity ? (LivingEntity)entity : null;
}
@Override
public Location getLocation() {
if (location != null) {
return location;
}
if (entity != null) {
return entity.getLocation();
}
return spell.getLocation();
}
@Override
public Location getTargetLocation() {
return targetLocation;
}
@Override
public Block getTargetBlock() {
return targetLocation == null ? null : targetLocation.getBlock();
}
@Override
public Entity getTargetEntity() {
return targetEntity;
}
@Override
public Vector getDirection() {
return getLocation().getDirection();
}
@Override
public World getWorld() {
Location location = getLocation();
return location == null ? null : location.getWorld();
}
@Override
public void setTargetEntity(Entity targetEntity) {
this.targetEntity = targetEntity;
addTargetEntity(targetEntity);
}
@Override
public void setTargetLocation(Location targetLocation) {
this.targetLocation = targetLocation;
}
@Override
public Spell getSpell() {
return spell;
}
@Override
public Mage getMage() {
return this.mageSpell == null ? null : this.mageSpell.getMage();
}
@Override
public Collection<EffectPlayer> getEffects(String key) {
return spell.getEffects(key);
}
@Override
public MageController getController() {
Mage mage = getMage();
return mage == null ? null : mage.getController();
}
@Override
public void registerForUndo(Runnable runnable)
{
addWork(1);
if (undoList != null)
{
undoList.add(runnable);
}
}
@Override
public void registerModified(Entity entity)
{
addWork(5);
if (undoList != null)
{
undoList.modify(entity);
}
}
@Override
public void registerForUndo(Entity entity)
{
addWork(5);
if (undoList != null)
{
undoList.add(entity);
}
}
@Override
public void registerForUndo(Block block)
{
addWork(10);
if (undoList != null)
{
undoList.add(block);
}
}
@Override
public void updateBlock(Block block)
{
MageController controller = getController();
if (controller != null)
{
controller.updateBlock(block);
}
}
@Override
public void registerVelocity(Entity entity)
{
addWork(5);
if (undoList != null)
{
undoList.modifyVelocity(entity);
}
}
@Override
public void registerMoved(Entity entity)
{
addWork(5);
if (undoList != null)
{
undoList.move(entity);
}
}
@Override
public void registerPotionEffects(Entity entity)
{
addWork(5);
if (undoList != null)
{
undoList.addPotionEffects(entity);
}
}
@Override
public Block getPreviousBlock()
{
return targetingSpell != null ? targetingSpell.getPreviousBlock() : null;
}
@Override
public boolean isIndestructible(Block block) {
return blockSpell != null ? blockSpell.isIndestructible(block) : true;
}
@Override
public boolean hasBuildPermission(Block block) {
return baseSpell != null ? baseSpell.hasBuildPermission(block) : false;
}
@Override
public boolean hasBreakPermission(Block block) {
return baseSpell != null ? baseSpell.hasBreakPermission(block) : false;
}
@Override
public void playEffects(String key)
{
playEffects(key, 1.0f);
}
@Override
public void playEffects(String effectName, float scale)
{
Location wand = getWandLocation();
Location location = getEyeLocation();
Collection<EffectPlayer> effects = getEffects(effectName);
if (effects.size() > 0)
{
Collection<Entity> targeted = getTargetedEntities();
Entity sourceEntity = getEntity();
Entity targetEntity = getTargetEntity();
Location targetLocation = getTargetLocation();
for (EffectPlayer player : effects)
{
// Track effect plays for cancelling
player.setEffectPlayList(currentEffects);
// Set scale
player.setScale(scale);
// Set material and color
player.setMaterial(spell.getEffectMaterial());
player.setColor(spell.getEffectColor());
String overrideParticle = spell.getEffectParticle();
player.setParticleOverride(overrideParticle);
Location source = player.shouldUseWandLocation() ? wand : location;
Location target = targetLocation;
if (!player.shouldUseHitLocation() && targetEntity != null) {
if (targetEntity instanceof LivingEntity) {
target = ((LivingEntity)targetEntity).getEyeLocation();
} else {
target = targetEntity.getLocation();
}
}
player.start(source, sourceEntity, target, targetEntity, targeted);
}
}
}
@Override
public void cancelEffects() {
for (EffectPlay player : currentEffects) {
player.cancel();
}
currentEffects.clear();
}
@Override
public String getMessage(String key) {
return getMessage(key, key);
}
@Override
public String getMessage(String key, String def) {
return baseSpell != null ? baseSpell.getMessage(key, def) : def;
}
@Override
public Location findPlaceToStand(Location target, int verticalSearchDistance, boolean goUp) {
return baseSpell != null ? baseSpell.findPlaceToStand(target, goUp, verticalSearchDistance) : location;
}
public Location findPlaceToStand(Location targetLoc, int verticalSearchDistance) {
return baseSpell != null ? baseSpell.findPlaceToStand(targetLoc, verticalSearchDistance, verticalSearchDistance) : location;
}
@Override
public int getVerticalSearchDistance() {
return baseSpell != null ? baseSpell.getVerticalSearchDistance() : 4;
}
@Override
public boolean isOkToStandIn(Material material)
{
return baseSpell != null ? baseSpell.isOkToStandIn(material) : true;
}
@Override
public boolean isWater(Material mat)
{
return (mat == Material.WATER || mat == Material.STATIONARY_WATER);
}
@Override
public boolean isOkToStandOn(Material material)
{
return (material != Material.AIR && material != Material.LAVA && material != Material.STATIONARY_LAVA);
}
@Override
public boolean allowPassThrough(Material material)
{
return baseSpell != null ? baseSpell.allowPassThrough(material) : true;
}
@Override
public void castMessage(String message)
{
if (baseSpell != null)
{
baseSpell.castMessage(getMessage(message));
}
}
@Override
public void sendMessage(String message)
{
if (baseSpell != null)
{
baseSpell.sendMessage(getMessage(message));
}
}
@Override
public void setTargetedLocation(Location location)
{
if (targetingSpell != null)
{
targetingSpell.setTarget(location);
}
}
@Override
public Block findBlockUnder(Block block)
{
if (targetingSpell != null)
{
block = targetingSpell.findBlockUnder(block);
}
return block;
}
@Override
public Block findSpaceAbove(Block block)
{
if (targetingSpell != null)
{
block = targetingSpell.findSpaceAbove(block);
}
return block;
}
@Override
public boolean isTransparent(Material material)
{
if (targetingSpell != null)
{
return targetingSpell.isTransparent(material);
}
return material.isTransparent();
}
@Override
public boolean isPassthrough(Material material)
{
if (baseSpell != null)
{
return baseSpell.isPassthrough(material);
}
return material.isTransparent();
}
@Override
public boolean isDestructible(Block block)
{
if (blockSpell != null)
{
return blockSpell.isDestructible(block);
}
return true;
}
@Override
public boolean areAnyDestructible(Block block)
{
if (blockSpell != null)
{
return blockSpell.areAnyDestructible(block);
}
return true;
}
@Override
public boolean isTargetable(Material material)
{
if (targetingSpell != null)
{
return targetingSpell.isTargetable(material);
}
return true;
}
@Override
public boolean canTarget(Entity entity) {
return targetingSpell == null ? true : targetingSpell.canTarget(entity);
}
@Override
public MaterialBrush getBrush() {
if (brush != null) {
return brush;
}
return brushSpell == null ? null : brushSpell.getBrush();
}
@Override
public void setBrush(MaterialBrush brush) {
this.brush = brush;
}
@Override
public void addTargetEntity(Entity entity)
{
if (entity != null)
{
targetedEntities.add(entity);
}
}
@Override
public Collection<Entity> getTargetedEntities()
{
return targetedEntities;
}
@Override
public void getTargetEntities(int targetCount, Collection<WeakReference<Entity>> entities)
{
if (targetingSpell == null)
{
return;
}
List<Target> candidates = ((TargetingSpell)spell).getAllTargetEntities();
if (targetCount < 0) {
targetCount = entities.size();
}
for (int i = 0; i < targetCount && i < candidates.size(); i++) {
Target target = candidates.get(i);
entities.add(new WeakReference<Entity>(target.getEntity()));
}
}
@Override
public void messageTargets(String messageKey)
{
Mage mage = getMage();
MageController controller = getController();
LivingEntity sourceEntity = mage == null ? null : mage.getLivingEntity();
String playerMessage = getMessage(messageKey);
if (!mage.isStealth() && playerMessage.length() > 0)
{
Collection<Entity> targets = getTargetedEntities();
for (Entity target : targets)
{
UUID targetUUID = target.getUniqueId();
if (target instanceof Player && target != sourceEntity && !targetMessagesSent.contains(targetUUID))
{
targetMessagesSent.add(targetUUID);
playerMessage = playerMessage.replace("$spell", spell.getName());
Mage targetMage = controller.getMage(target);
targetMage.sendMessage(playerMessage);
}
}
}
}
@Override
public Block getInteractBlock() {
Location location = getEyeLocation();
if (location == null) return null;
Block playerBlock = location.getBlock();
if (isTargetable(playerBlock.getType())) return playerBlock;
Vector direction = location.getDirection().normalize();
return location.add(direction).getBlock();
}
@Override
public Random getRandom() {
if (random == null) {
random = new Random();
}
return random;
}
@Override
public UndoList getUndoList() {
return undoList;
}
@Override
public String getTargetName() {
return targetName;
}
@Override
public void setTargetName(String name) {
targetName = name;
}
@Override
public Logger getLogger() {
return getController().getLogger();
}
@Override
public int getWorkAllowed() {
return this.base.workAllowed;
}
@Override
public void setWorkAllowed(int work) {
this.base.workAllowed = work;
}
@Override
public void addWork(int work) {
this.base.workAllowed -= work;
}
@Override
public void performedActions(int count) {
this.base.actionsPerformed += count;
}
@Override
public int getActionsPerformed() {
return base.actionsPerformed;
}
@Override
public void finish() {
if (undoSpell != null && undoSpell.isUndoable())
{
if (!undoList.isScheduled())
{
getController().update(undoList);
}
getMage().registerForUndo(undoList);
}
castMessage("cast_finish");
}
@Override
public void retarget(int range, double fov, double closeRange, double closeFOV, boolean useHitbox) {
if (targetingSpell != null)
{
targetingSpell.retarget(range, fov, closeRange, closeFOV, useHitbox);
setTargetEntity(targetingSpell.getTargetEntity());
setTargetLocation(targetingSpell.getTargetLocation());
}
}
@Override
public void retarget(int range, double fov, double closeRange, double closeFOV, boolean useHitbox, Vector offset, boolean targetSpaceRequired, int targetMinOffset) {
if (targetingSpell != null)
{
targetingSpell.retarget(range, fov, closeRange, closeFOV, useHitbox, offset, targetSpaceRequired, targetMinOffset);
setTargetEntity(targetingSpell.getTargetEntity());
setTargetLocation(targetingSpell.getTargetLocation());
}
}
@Override
public com.elmakers.mine.bukkit.api.action.CastContext getBaseContext()
{
return base;
}
@Override
public Set<UUID> getTargetMessagesSent() {
return targetMessagesSent;
}
@Override
public Collection<EffectPlay> getCurrentEffects() {
return currentEffects;
}
@Override
public void registerBreakable(Block block, int breakable) {
if (block == null || block.getType() == Material.AIR) return;
MageController controller = getController();
if (breakable > 0) {
block.setMetadata("breakable", new FixedMetadataValue(controller.getPlugin(), breakable));
} else {
block.removeMetadata("breakable", controller.getPlugin());
}
undoList.setUndoBreakable(true);
}
@Override
public void registerReflective(Block block, double reflectivity) {
if (block == null || block.getType() == Material.AIR) return;
MageController controller = getController();
if (reflectivity > 0) {
block.setMetadata("backfire", new FixedMetadataValue(controller.getPlugin(), reflectivity));
} else {
block.removeMetadata("backfire", controller.getPlugin());
}
undoList.setUndoReflective(true);
}
@Override
public Plugin getPlugin() {
MageController controller = getController();
return controller == null ? null : controller.getPlugin();
}
@Override
public void teleport(final Entity entity, final Location location, final int verticalSearchDistance)
{
Plugin plugin = getPlugin();
Bukkit.getScheduler().scheduleSyncDelayedTask(plugin, new Runnable() {
public void run() {
delayedTeleport(entity, location, verticalSearchDistance);
}
}, 1);
}
protected void delayedTeleport(final Entity entity, final Location location, final int verticalSearchDistance)
{
MageController controller = getController();
Chunk chunk = location.getBlock().getChunk();
int retryCount = 0;
if (!chunk.isLoaded()) {
chunk.load(true);
if (retryCount < TELEPORT_RETRY_COUNT) {
Plugin plugin = controller.getPlugin();
Bukkit.getScheduler().scheduleSyncDelayedTask(plugin, new Runnable() {
public void run() {
delayedTeleport(entity, location, verticalSearchDistance);
}
}, TELEPORT_RETRY_INTERVAL);
}
return;
}
registerMoved(entity);
Location targetLocation = findPlaceToStand(location, verticalSearchDistance);
if (targetLocation != null) {
setTargetedLocation(targetLocation);
// Hacky double-teleport to work-around vanilla suffocation checks
boolean isWorldChange = !targetLocation.getWorld().equals(entity.getWorld());
entity.teleport(targetLocation);
if (isWorldChange) {
entity.teleport(targetLocation);
}
sendMessage("teleport");
playEffects("teleport");
} else {
sendMessage("teleport_failed");
playEffects("teleport_failed");
}
}
@Override
public void setSpellParameters(ConfigurationSection parameters) {
if (baseSpell != null) {
baseSpell.processParameters(parameters);
}
}
@Override
public Set<Material> getMaterialSet(String key) {
return getController().getMaterialSet(key);
}
}
| src/main/java/com/elmakers/mine/bukkit/action/CastContext.java | package com.elmakers.mine.bukkit.action;
import com.elmakers.mine.bukkit.api.block.MaterialBrush;
import com.elmakers.mine.bukkit.api.effect.EffectPlay;
import com.elmakers.mine.bukkit.api.effect.EffectPlayer;
import com.elmakers.mine.bukkit.api.magic.Mage;
import com.elmakers.mine.bukkit.api.magic.MageController;
import com.elmakers.mine.bukkit.api.spell.MageSpell;
import com.elmakers.mine.bukkit.api.spell.Spell;
import com.elmakers.mine.bukkit.api.block.UndoList;
import com.elmakers.mine.bukkit.spell.BaseSpell;
import com.elmakers.mine.bukkit.spell.BlockSpell;
import com.elmakers.mine.bukkit.spell.BrushSpell;
import com.elmakers.mine.bukkit.spell.TargetingSpell;
import com.elmakers.mine.bukkit.spell.UndoableSpell;
import com.elmakers.mine.bukkit.utility.Target;
import org.bukkit.Bukkit;
import org.bukkit.Chunk;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.World;
import org.bukkit.block.Block;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.entity.Entity;
import org.bukkit.entity.LivingEntity;
import org.bukkit.entity.Player;
import org.bukkit.metadata.FixedMetadataValue;
import org.bukkit.plugin.Plugin;
import org.bukkit.util.Vector;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.UUID;
import java.util.WeakHashMap;
import java.util.logging.Logger;
public class CastContext implements com.elmakers.mine.bukkit.api.action.CastContext {
protected static Random random;
protected final static int TELEPORT_RETRY_COUNT = 8;
protected final static int TELEPORT_RETRY_INTERVAL = 10;
private final Location location;
private final Entity entity;
private Location targetLocation;
private Entity targetEntity;
private UndoList undoList;
private String targetName = null;
private Collection<Entity> targetedEntities = Collections.newSetFromMap(new WeakHashMap<Entity, Boolean>());
private Set<UUID> targetMessagesSent = new HashSet<UUID>();
private Collection<EffectPlay> currentEffects = new ArrayList<EffectPlay>();
private Spell spell;
private BaseSpell baseSpell;
private BlockSpell blockSpell;
private MageSpell mageSpell;
private BrushSpell brushSpell;
private TargetingSpell targetingSpell;
private UndoableSpell undoSpell;
private MaterialBrush brush;
private CastContext base;
// Base Context
private int workAllowed = 500;
private int actionsPerformed;
public static double WAND_LOCATION_OFFSET = 0.5;
protected class TeleportTask implements Runnable {
private final CastContext context;
private final Entity entity;
private final Location location;
private final int verticalSearchDistance;
protected TeleportTask(CastContext context, final Entity entity, final Location location, final int verticalSearchDistance) {
this.context = context;
this.entity = entity;
this.location = location;
this.verticalSearchDistance = verticalSearchDistance;
}
@Override
public void run() {
context.delayedTeleport(entity, location, verticalSearchDistance);
}
}
public CastContext(Spell spell) {
this.setSpell(spell);
this.location = null;
this.entity = null;
this.base = this;
targetedEntities = Collections.newSetFromMap(new WeakHashMap<Entity, Boolean>());
targetMessagesSent = new HashSet<UUID>();
currentEffects = new ArrayList<EffectPlay>();
}
public CastContext(com.elmakers.mine.bukkit.api.action.CastContext copy) {
this(copy, copy.getEntity(), copy.getLocation());
}
public CastContext(com.elmakers.mine.bukkit.api.action.CastContext copy, Entity sourceEntity) {
this(copy, sourceEntity, null);
}
public CastContext(com.elmakers.mine.bukkit.api.action.CastContext copy, Location sourceLocation) {
this(copy, null, sourceLocation);
}
public CastContext(com.elmakers.mine.bukkit.api.action.CastContext copy, Entity sourceEntity, Location sourceLocation) {
this.location = sourceLocation;
this.entity = sourceEntity;
this.setSpell(copy.getSpell());
this.targetEntity = copy.getTargetEntity();
this.targetLocation = copy.getTargetLocation();
this.targetedEntities = copy.getTargetedEntities();
this.undoList = copy.getUndoList();
this.targetName = copy.getTargetName();
this.brush = copy.getBrush();
this.targetMessagesSent = copy.getTargetMessagesSent();
this.currentEffects = copy.getCurrentEffects();
if (copy instanceof CastContext)
{
this.base = ((CastContext)copy).base;
}
else
{
this.base = this;
}
}
public void setSpell(Spell spell)
{
this.spell = spell;
if (spell instanceof BaseSpell)
{
this.baseSpell = (BaseSpell)spell;
}
if (spell instanceof MageSpell)
{
this.mageSpell = (MageSpell)spell;
}
if (spell instanceof UndoableSpell)
{
this.undoSpell = (UndoableSpell)spell;
undoList = this.undoSpell.getUndoList();
}
if (spell instanceof TargetingSpell)
{
this.targetingSpell = (TargetingSpell)spell;
}
if (spell instanceof BlockSpell)
{
this.blockSpell = (BlockSpell)spell;
}
if (spell instanceof BrushSpell)
{
this.brushSpell = (BrushSpell)spell;
}
}
@Override
public Location getWandLocation() {
Location wandLocation = getEyeLocation();
if (wandLocation == null) {
return null;
}
Location toTheRight = wandLocation.clone();
toTheRight.setYaw(toTheRight.getYaw() + 90);
Vector wandDirection = toTheRight.getDirection();
wandLocation = wandLocation.clone();
wandLocation.add(wandDirection.multiply(WAND_LOCATION_OFFSET));
return wandLocation;
}
@Override
public Location getEyeLocation() {
if (location != null) {
return location;
}
if (entity != null) {
if (entity instanceof LivingEntity) {
return ((LivingEntity) entity).getEyeLocation();
}
return entity.getLocation();
}
return spell.getEyeLocation();
}
@Override
public Entity getEntity() {
if (entity != null) {
return entity;
}
return spell.getEntity();
}
@Override
public LivingEntity getLivingEntity() {
Entity entity = getEntity();
return entity instanceof LivingEntity ? (LivingEntity)entity : null;
}
@Override
public Location getLocation() {
if (location != null) {
return location;
}
if (entity != null) {
return entity.getLocation();
}
return spell.getLocation();
}
@Override
public Location getTargetLocation() {
return targetLocation;
}
@Override
public Block getTargetBlock() {
return targetLocation == null ? null : targetLocation.getBlock();
}
@Override
public Entity getTargetEntity() {
return targetEntity;
}
@Override
public Vector getDirection() {
return getLocation().getDirection();
}
@Override
public World getWorld() {
Location location = getLocation();
return location == null ? null : location.getWorld();
}
@Override
public void setTargetEntity(Entity targetEntity) {
this.targetEntity = targetEntity;
addTargetEntity(targetEntity);
}
@Override
public void setTargetLocation(Location targetLocation) {
this.targetLocation = targetLocation;
}
@Override
public Spell getSpell() {
return spell;
}
@Override
public Mage getMage() {
return this.mageSpell == null ? null : this.mageSpell.getMage();
}
@Override
public Collection<EffectPlayer> getEffects(String key) {
return spell.getEffects(key);
}
@Override
public MageController getController() {
Mage mage = getMage();
return mage == null ? null : mage.getController();
}
@Override
public void registerForUndo(Runnable runnable)
{
addWork(1);
if (undoList != null)
{
undoList.add(runnable);
}
}
@Override
public void registerModified(Entity entity)
{
addWork(5);
if (undoList != null)
{
undoList.modify(entity);
}
}
@Override
public void registerForUndo(Entity entity)
{
addWork(5);
if (undoList != null)
{
undoList.add(entity);
}
}
@Override
public void registerForUndo(Block block)
{
addWork(10);
if (undoList != null)
{
undoList.add(block);
}
}
@Override
public void updateBlock(Block block)
{
MageController controller = getController();
if (controller != null)
{
controller.updateBlock(block);
}
}
@Override
public void registerVelocity(Entity entity)
{
addWork(5);
if (undoList != null)
{
undoList.modifyVelocity(entity);
}
}
@Override
public void registerMoved(Entity entity)
{
addWork(5);
if (undoList != null)
{
undoList.move(entity);
}
}
@Override
public void registerPotionEffects(Entity entity)
{
addWork(5);
if (undoList != null)
{
undoList.addPotionEffects(entity);
}
}
@Override
public Block getPreviousBlock()
{
return targetingSpell != null ? targetingSpell.getPreviousBlock() : null;
}
@Override
public boolean isIndestructible(Block block) {
return blockSpell != null ? blockSpell.isIndestructible(block) : true;
}
@Override
public boolean hasBuildPermission(Block block) {
return baseSpell != null ? baseSpell.hasBuildPermission(block) : false;
}
@Override
public boolean hasBreakPermission(Block block) {
return baseSpell != null ? baseSpell.hasBreakPermission(block) : false;
}
@Override
public void playEffects(String key)
{
playEffects(key, 1.0f);
}
@Override
public void playEffects(String effectName, float scale)
{
Location wand = getWandLocation();
Location location = getEyeLocation();
Collection<EffectPlayer> effects = getEffects(effectName);
if (effects.size() > 0)
{
Collection<Entity> targeted = getTargetedEntities();
Entity sourceEntity = getEntity();
Entity targetEntity = getTargetEntity();
Location targetLocation = getTargetLocation();
for (EffectPlayer player : effects)
{
// Track effect plays for cancelling
player.setEffectPlayList(currentEffects);
// Set scale
player.setScale(scale);
// Set material and color
player.setMaterial(spell.getEffectMaterial());
player.setColor(spell.getEffectColor());
String overrideParticle = spell.getEffectParticle();
player.setParticleOverride(overrideParticle);
Location source = player.shouldUseWandLocation() ? wand : location;
Location target = targetLocation;
if (!player.shouldUseHitLocation() && targetEntity != null) {
if (targetEntity instanceof LivingEntity) {
target = ((LivingEntity)targetEntity).getEyeLocation();
} else {
target = targetEntity.getLocation();
}
}
player.start(source, sourceEntity, target, targetEntity, targeted);
}
}
}
@Override
public void cancelEffects() {
for (EffectPlay player : currentEffects) {
player.cancel();
}
currentEffects.clear();
}
@Override
public String getMessage(String key) {
return getMessage(key, key);
}
@Override
public String getMessage(String key, String def) {
return baseSpell != null ? baseSpell.getMessage(key, def) : def;
}
@Override
public Location findPlaceToStand(Location target, int verticalSearchDistance, boolean goUp) {
return baseSpell != null ? baseSpell.findPlaceToStand(target, goUp, verticalSearchDistance) : location;
}
public Location findPlaceToStand(Location targetLoc, int verticalSearchDistance) {
return baseSpell != null ? baseSpell.findPlaceToStand(targetLoc, verticalSearchDistance, verticalSearchDistance) : location;
}
@Override
public int getVerticalSearchDistance() {
return baseSpell != null ? baseSpell.getVerticalSearchDistance() : 4;
}
@Override
public boolean isOkToStandIn(Material material)
{
return baseSpell != null ? baseSpell.isOkToStandIn(material) : true;
}
@Override
public boolean isWater(Material mat)
{
return (mat == Material.WATER || mat == Material.STATIONARY_WATER);
}
@Override
public boolean isOkToStandOn(Material material)
{
return (material != Material.AIR && material != Material.LAVA && material != Material.STATIONARY_LAVA);
}
@Override
public boolean allowPassThrough(Material material)
{
return baseSpell != null ? baseSpell.allowPassThrough(material) : true;
}
@Override
public void castMessage(String message)
{
if (baseSpell != null)
{
baseSpell.castMessage(getMessage(message));
}
}
@Override
public void sendMessage(String message)
{
if (baseSpell != null)
{
baseSpell.sendMessage(getMessage(message));
}
}
@Override
public void setTargetedLocation(Location location)
{
if (targetingSpell != null)
{
targetingSpell.setTarget(location);
}
}
@Override
public Block findBlockUnder(Block block)
{
if (targetingSpell != null)
{
block = targetingSpell.findBlockUnder(block);
}
return block;
}
@Override
public Block findSpaceAbove(Block block)
{
if (targetingSpell != null)
{
block = targetingSpell.findSpaceAbove(block);
}
return block;
}
@Override
public boolean isTransparent(Material material)
{
if (targetingSpell != null)
{
return targetingSpell.isTransparent(material);
}
return material.isTransparent();
}
@Override
public boolean isPassthrough(Material material)
{
if (baseSpell != null)
{
return baseSpell.isPassthrough(material);
}
return material.isTransparent();
}
@Override
public boolean isDestructible(Block block)
{
if (blockSpell != null)
{
return blockSpell.isDestructible(block);
}
return true;
}
@Override
public boolean areAnyDestructible(Block block)
{
if (blockSpell != null)
{
return blockSpell.areAnyDestructible(block);
}
return true;
}
@Override
public boolean isTargetable(Material material)
{
if (targetingSpell != null)
{
return targetingSpell.isTargetable(material);
}
return true;
}
@Override
public boolean canTarget(Entity entity) {
return targetingSpell == null ? true : targetingSpell.canTarget(entity);
}
@Override
public MaterialBrush getBrush() {
if (brush != null) {
return brush;
}
return brushSpell == null ? null : brushSpell.getBrush();
}
@Override
public void setBrush(MaterialBrush brush) {
this.brush = brush;
}
@Override
public void addTargetEntity(Entity entity)
{
if (entity != null)
{
targetedEntities.add(entity);
}
}
@Override
public Collection<Entity> getTargetedEntities()
{
return targetedEntities;
}
@Override
public void getTargetEntities(int targetCount, Collection<WeakReference<Entity>> entities)
{
if (targetingSpell == null)
{
return;
}
List<Target> candidates = ((TargetingSpell)spell).getAllTargetEntities();
if (targetCount < 0) {
targetCount = entities.size();
}
for (int i = 0; i < targetCount && i < candidates.size(); i++) {
Target target = candidates.get(i);
entities.add(new WeakReference<Entity>(target.getEntity()));
}
}
@Override
public void messageTargets(String messageKey)
{
Mage mage = getMage();
MageController controller = getController();
LivingEntity sourceEntity = mage == null ? null : mage.getLivingEntity();
String playerMessage = getMessage(messageKey);
if (!mage.isStealth() && playerMessage.length() > 0)
{
Collection<Entity> targets = getTargetedEntities();
for (Entity target : targets)
{
UUID targetUUID = target.getUniqueId();
if (target instanceof Player && target != sourceEntity && !targetMessagesSent.contains(targetUUID))
{
targetMessagesSent.add(targetUUID);
playerMessage = playerMessage.replace("$spell", spell.getName());
Mage targetMage = controller.getMage(target);
targetMage.sendMessage(playerMessage);
}
}
}
}
@Override
public Block getInteractBlock() {
Location location = getEyeLocation();
if (location == null) return null;
Block playerBlock = location.getBlock();
if (isTargetable(playerBlock.getType())) return playerBlock;
Vector direction = location.getDirection().normalize();
return location.add(direction).getBlock();
}
@Override
public Random getRandom() {
if (random == null) {
random = new Random();
}
return random;
}
@Override
public UndoList getUndoList() {
return undoList;
}
@Override
public String getTargetName() {
return targetName;
}
@Override
public void setTargetName(String name) {
targetName = name;
}
@Override
public Logger getLogger() {
return getController().getLogger();
}
@Override
public int getWorkAllowed() {
return this.base.workAllowed;
}
@Override
public void setWorkAllowed(int work) {
this.base.workAllowed = work;
}
@Override
public void addWork(int work) {
this.base.workAllowed -= work;
}
@Override
public void performedActions(int count) {
this.base.actionsPerformed += count;
}
@Override
public int getActionsPerformed() {
return base.actionsPerformed;
}
@Override
public void finish() {
if (undoSpell != null && undoSpell.isUndoable())
{
if (!undoList.isScheduled())
{
getController().update(undoList);
}
getMage().registerForUndo(undoList);
}
castMessage("cast_finish");
}
@Override
public void retarget(int range, double fov, double closeRange, double closeFOV, boolean useHitbox) {
if (targetingSpell != null)
{
targetingSpell.retarget(range, fov, closeRange, closeFOV, useHitbox);
setTargetEntity(targetingSpell.getTargetEntity());
setTargetLocation(targetingSpell.getTargetLocation());
}
}
@Override
public void retarget(int range, double fov, double closeRange, double closeFOV, boolean useHitbox, Vector offset, boolean targetSpaceRequired, int targetMinOffset) {
if (targetingSpell != null)
{
targetingSpell.retarget(range, fov, closeRange, closeFOV, useHitbox, offset, targetSpaceRequired, targetMinOffset);
setTargetEntity(targetingSpell.getTargetEntity());
setTargetLocation(targetingSpell.getTargetLocation());
}
}
@Override
public com.elmakers.mine.bukkit.api.action.CastContext getBaseContext()
{
return base;
}
@Override
public Set<UUID> getTargetMessagesSent() {
return targetMessagesSent;
}
@Override
public Collection<EffectPlay> getCurrentEffects() {
return currentEffects;
}
@Override
public void registerBreakable(Block block, int breakable) {
if (block == null || block.getType() == Material.AIR) return;
MageController controller = getController();
if (breakable > 0) {
block.setMetadata("breakable", new FixedMetadataValue(controller.getPlugin(), breakable));
} else {
block.removeMetadata("breakable", controller.getPlugin());
}
undoList.setUndoBreakable(true);
}
@Override
public void registerReflective(Block block, double reflectivity) {
if (block == null || block.getType() == Material.AIR) return;
MageController controller = getController();
if (reflectivity > 0) {
block.setMetadata("backfire", new FixedMetadataValue(controller.getPlugin(), reflectivity));
} else {
block.removeMetadata("backfire", controller.getPlugin());
}
undoList.setUndoReflective(true);
}
@Override
public Plugin getPlugin() {
MageController controller = getController();
return controller == null ? null : controller.getPlugin();
}
@Override
public void teleport(final Entity entity, final Location location, final int verticalSearchDistance)
{
Plugin plugin = getPlugin();
Bukkit.getScheduler().scheduleSyncDelayedTask(plugin, new Runnable() {
public void run() {
delayedTeleport(entity, location, verticalSearchDistance);
}
}, 1);
}
protected void delayedTeleport(final Entity entity, final Location location, final int verticalSearchDistance)
{
MageController controller = getController();
Chunk chunk = location.getBlock().getChunk();
int retryCount = 0;
if (!chunk.isLoaded()) {
chunk.load(true);
if (retryCount < TELEPORT_RETRY_COUNT) {
Plugin plugin = controller.getPlugin();
Bukkit.getScheduler().scheduleSyncDelayedTask(plugin, new Runnable() {
public void run() {
delayedTeleport(entity, location, verticalSearchDistance);
}
}, TELEPORT_RETRY_INTERVAL);
}
return;
}
playEffects("teleport");
registerMoved(entity);
Location targetLocation = findPlaceToStand(location, verticalSearchDistance);
if (targetLocation != null) {
setTargetedLocation(targetLocation);
entity.teleport(targetLocation);
}
}
@Override
public void setSpellParameters(ConfigurationSection parameters) {
if (baseSpell != null) {
baseSpell.processParameters(parameters);
}
}
@Override
public Set<Material> getMaterialSet(String key) {
return getController().getMaterialSet(key);
}
}
| Trust our internal safety TP checks more than the vanilla ones. Vanilla ones put you on top of the nether.
| src/main/java/com/elmakers/mine/bukkit/action/CastContext.java | Trust our internal safety TP checks more than the vanilla ones. Vanilla ones put you on top of the nether. | <ide><path>rc/main/java/com/elmakers/mine/bukkit/action/CastContext.java
<ide> return;
<ide> }
<ide>
<del> playEffects("teleport");
<del>
<ide> registerMoved(entity);
<ide> Location targetLocation = findPlaceToStand(location, verticalSearchDistance);
<ide> if (targetLocation != null) {
<ide> setTargetedLocation(targetLocation);
<add> // Hacky double-teleport to work-around vanilla suffocation checks
<add> boolean isWorldChange = !targetLocation.getWorld().equals(entity.getWorld());
<ide> entity.teleport(targetLocation);
<add> if (isWorldChange) {
<add> entity.teleport(targetLocation);
<add> }
<add> sendMessage("teleport");
<add> playEffects("teleport");
<add> } else {
<add> sendMessage("teleport_failed");
<add> playEffects("teleport_failed");
<ide> }
<ide> }
<ide> |
|
JavaScript | bsd-3-clause | f8348b8230ccf2f03afd167e19f6827728a96965 | 0 | greggman/HappyFunTimes,greggman/HappyFunTimes,LDodson/HappyFunTimes,LDodson/HappyFunTimes | /*
* Copyright 2014, Gregg Tavares.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Gregg Tavares. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*eslint no-process-exit:0*/
'use strict';
var settingsOptionSpec = {
option: 'settings', type: 'String', description: 'settings: key=value, ',
};
var optionSpec = {
options: [
{ option: 'help', alias: 'h', type: 'Boolean', description: 'displays help'},
{ option: 'config-path', type: 'String', description: 'config path'},
{ option: 'settings-path', type: 'String', description: 'settings path'},
{ option: 'private-server', type: 'Boolean', description: 'do not inform happyfuntimes.net about this server. Users will not be able to use happyfuntimes.net to connect to your games'},
{ option: 'debug', type: 'Boolean', description: 'check more things'},
{ option: 'verbose', type: 'Boolean', description: 'print more stuff'},
settingsOptionSpec,
].concat(require('./common-cli-options').options),
helpStyle: {
typeSeparator: '=',
descriptionSeparator: ' : ',
initialIndent: 4,
},
};
var config = require('../lib/config');
var log = require('../lib/log');
var optionator = require('optionator')(optionSpec);
try {
var args = optionator.parse(process.argv);
} catch (e) {
console.error(e);
process.exit(1);
}
var printHelp = function() {
var settings = [];
Object.keys(require('../lib/config').getSettings().settings).forEach(function(key) {
settings.push(key);
});
settingsOptionSpec.description += settings.join(', ');
console.log(optionator.generateHelp());
process.exit(0);
};
if (args.help) {
printHelp();
}
log.config(args);
config.setup(args);
if (args.settings) {
var settings = config.getSettings().settings;
args.settings.split(',').forEach(function(setting) {
var keyValue = setting.split('=');
var key = keyValue[0];
var value = keyValue[1];
if (!settings[key]) {
console.error('no setting: "' + key + '"');
printHelp();
}
settings[key] = value;
});
}
if (args.appMode) {
require('../lib/games').init();
}
var browser = require('../lib/browser');
var DNSServer = require('./dnsserver');
var iputils = require('../lib/iputils');
var Promise = require('promise');
var HFTServer = require('./hft-server');
var server;
var launchBrowser = function(err) {
var next = function() {
if (err) {
console.error(err);
process.exit(1);
} else {
if (args.appMode) {
console.log([
'',
'---==> HappyFunTimes Running <==---',
'',
].join('\n'));
}
}
};
var p;
if (args.appMode || args.show) {
var name = args.show || 'games';
p = browser.launch('http://localhost:' + server.getSettings().port + '/' + name + '.html', config.getConfig().preferredBrowser);
} else {
p = Promise.resolve();
}
p.then(function() {
next();
}).catch(function(err) {
console.error(err);
next();
});
};
server = new HFTServer(args, launchBrowser);
if (args.dns) {
  // This doesn't need to dynamically check for a change in ip address
  // because it should only be used in a static ip address situation
// since DNS has to be static for our use-case.
(function() {
return new DNSServer({address: args.address || iputils.getIpAddresses()[0]});
}());
server.on('ports', function(ports) {
if (ports.indexOf('80') < 0 && ports.indexOf(80) < 0) {
console.error('You specified --dns but happyFunTimes could not use port 80.');
console.error('Do you need to run this as admin or use sudo?');
process.exit(1);
}
});
}
| server/server.js | /*
* Copyright 2014, Gregg Tavares.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Gregg Tavares. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
"use strict";
var settingsOptionSpec = {
option: 'settings', type: 'String', description: 'settings: key=value, ',
};
var optionSpec = {
options: [
{ option: 'help', alias: 'h', type: 'Boolean', description: 'displays help'},
{ option: 'config-path', type: 'String', description: 'config path'},
{ option: 'settings-path', type: 'String', description: 'settings path'},
{ option: 'private-server', type: 'Boolean', description: 'do not inform happyfuntimes.net about this server. Users will not be able to use happyfuntimes.net to connect to your games'},
{ option: 'debug', type: 'Boolean', description: 'check more things'},
{ option: 'verbose', type: 'Boolean', description: 'print more stuff'},
settingsOptionSpec,
].concat(require('./common-cli-options').options),
helpStyle: {
typeSeparator: '=',
descriptionSeparator: ' : ',
initialIndent: 4,
},
};
var config = require('../lib/config');
var log = require('../lib/log');
var optionator = require('optionator')(optionSpec);
try {
var args = optionator.parse(process.argv);
} catch (e) {
console.error(e);
process.exit(1);
}
var printHelp = function() {
var settings = [];
Object.keys(require('../lib/config').getSettings().settings).forEach(function(key) {
settings.push(key);
});
settingsOptionSpec.description += settings.join(", ");
console.log(optionator.generateHelp());
process.exit(0);
};
if (args.help) {
printHelp();
}
log.config(args);
config.setup(args);
if (args.settings) {
var settings = config.getSettings().settings
args.settings.split(",").forEach(function(setting) {
var keyValue = setting.split("=");
var key = keyValue[0];
var value = keyValue[1];
if (!settings[key]) {
console.error("no setting: '" + key + "'");
printHelp();
}
settings[key] = value;
});
}
if (args.appMode) {
require('../lib/games').init();
}
var browser = require('../lib/browser');
var debug = require('debug')('server');
var DNSServer = require('./dnsserver');
var iputils = require('../lib/iputils');
var Promise = require('promise');
var HFTServer = require('./hft-server');
var server;
var launchBrowser = function(err) {
var next = function() {
if (err) {
console.error(err);
process.exit(1);
} else {
if (args.appMode) {
console.log([
"",
"---==> HappyFunTimes Running <==---",
"",
].join("\n"))
}
}
};
var p;
if (args.appMode || args.show) {
var name = args.show || "games";
p = browser.launch("http://localhost:" + server.getSettings().port + "/" + name + ".html", config.getConfig().preferredBrowser);
} else {
p = Promise.resolve();
}
p.then(function() {
next();
}).catch(function(err) {
console.error(err);
next();
});
}
server = new HFTServer(args, launchBrowser);
if (args.dns) {
  // This doesn't need to dynamically check for a change in ip address
  // because it should only be used in a static ip address situation
// since DNS has to be static for our use-case.
var dnsServer = new DNSServer({address: args.address || iputils.getIpAddresses()[0]});
server.on('ports', function(ports) {
if (ports.indexOf("80") < 0 && ports.indexOf(80) < 0) {
console.error("You specified --dns but happyFunTimes could not use port 80.");
console.error("Do you need to run this as admin or use sudo?");
process.exit(1);
}
});
}
| lint server.js
| server/server.js | lint server.js | <ide><path>erver/server.js
<ide> * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
<ide> */
<ide>
<del>"use strict";
<add>/*eslint no-process-exit:0*/
<add>
<add>'use strict';
<ide>
<ide> var settingsOptionSpec = {
<ide> option: 'settings', type: 'String', description: 'settings: key=value, ',
<ide> Object.keys(require('../lib/config').getSettings().settings).forEach(function(key) {
<ide> settings.push(key);
<ide> });
<del> settingsOptionSpec.description += settings.join(", ");
<add> settingsOptionSpec.description += settings.join(', ');
<ide>
<ide> console.log(optionator.generateHelp());
<ide> process.exit(0);
<ide> log.config(args);
<ide> config.setup(args);
<ide> if (args.settings) {
<del> var settings = config.getSettings().settings
<del> args.settings.split(",").forEach(function(setting) {
<del> var keyValue = setting.split("=");
<add> var settings = config.getSettings().settings;
<add> args.settings.split(',').forEach(function(setting) {
<add> var keyValue = setting.split('=');
<ide> var key = keyValue[0];
<ide> var value = keyValue[1];
<ide> if (!settings[key]) {
<del> console.error("no setting: '" + key + "'");
<add> console.error('no setting: "' + key + '"');
<ide> printHelp();
<ide> }
<ide> settings[key] = value;
<ide> }
<ide>
<ide> var browser = require('../lib/browser');
<del>var debug = require('debug')('server');
<ide> var DNSServer = require('./dnsserver');
<ide> var iputils = require('../lib/iputils');
<ide> var Promise = require('promise');
<ide> } else {
<ide> if (args.appMode) {
<ide> console.log([
<del> "",
<del> "---==> HappyFunTimes Running <==---",
<del> "",
<del> ].join("\n"))
<add> '',
<add> '---==> HappyFunTimes Running <==---',
<add> '',
<add> ].join('\n'));
<ide> }
<ide> }
<ide> };
<ide>
<ide> var p;
<ide> if (args.appMode || args.show) {
<del> var name = args.show || "games";
<del> p = browser.launch("http://localhost:" + server.getSettings().port + "/" + name + ".html", config.getConfig().preferredBrowser);
<add> var name = args.show || 'games';
<add> p = browser.launch('http://localhost:' + server.getSettings().port + '/' + name + '.html', config.getConfig().preferredBrowser);
<ide> } else {
<ide> p = Promise.resolve();
<ide> }
<ide> console.error(err);
<ide> next();
<ide> });
<del>}
<add>};
<ide>
<ide> server = new HFTServer(args, launchBrowser);
<ide>
<ide>   // This doesn't need to dynamically check for a change in ip address
<ide>   // because it should only be used in a static ip address situation
<ide> // since DNS has to be static for our use-case.
<del> var dnsServer = new DNSServer({address: args.address || iputils.getIpAddresses()[0]});
<add> (function() {
<add> return new DNSServer({address: args.address || iputils.getIpAddresses()[0]});
<add> }());
<ide> server.on('ports', function(ports) {
<del> if (ports.indexOf("80") < 0 && ports.indexOf(80) < 0) {
<del> console.error("You specified --dns but happyFunTimes could not use port 80.");
<del> console.error("Do you need to run this as admin or use sudo?");
<add> if (ports.indexOf('80') < 0 && ports.indexOf(80) < 0) {
<add> console.error('You specified --dns but happyFunTimes could not use port 80.');
<add> console.error('Do you need to run this as admin or use sudo?');
<ide> process.exit(1);
<ide> }
<ide> }); |
|
Java | epl-1.0 | d4fee4eb690d46f879d05765a41825ac7018fe49 | 0 | 522986491/controller,mandeepdhami/controller,inocybe/odl-controller,inocybe/odl-controller,mandeepdhami/controller,tx1103mark/controller,mandeepdhami/controller,opendaylight/controller,tx1103mark/controller,Johnson-Chou/test,tx1103mark/controller,Sushma7785/OpenDayLight-Load-Balancer,Sushma7785/OpenDayLight-Load-Balancer,mandeepdhami/controller,tx1103mark/controller,522986491/controller,Johnson-Chou/test | /*
* Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.controller.sal.connect.netconf.util;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.io.IOException;
import java.net.URI;
import java.util.AbstractMap;
import java.util.Collections;
import java.util.Map;
import java.util.Map.Entry;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import javax.xml.transform.dom.DOMResult;
import javax.xml.transform.dom.DOMSource;
import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
import org.opendaylight.controller.netconf.api.NetconfMessage;
import org.opendaylight.controller.netconf.util.messages.NetconfMessageUtil;
import org.opendaylight.controller.netconf.util.xml.XmlUtil;
import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.netconf.base._1._0.rev110601.edit.config.input.EditContent;
import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.netconf.notification._1._0.rev080714.CreateSubscriptionInput;
import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.netconf.monitoring.rev101004.NetconfState;
import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.netconf.notifications.rev120206.NetconfCapabilityChange;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.common.RpcError;
import org.opendaylight.yangtools.yang.common.RpcError.ErrorSeverity;
import org.opendaylight.yangtools.yang.common.RpcResultBuilder;
import org.opendaylight.yangtools.yang.data.api.ModifyAction;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.AnyXmlNode;
import org.opendaylight.yangtools.yang.data.api.schema.ContainerNode;
import org.opendaylight.yangtools.yang.data.api.schema.DataContainerChild;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter;
import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeWriter;
import org.opendaylight.yangtools.yang.data.impl.codec.xml.XMLStreamNormalizedNodeStreamWriter;
import org.opendaylight.yangtools.yang.data.impl.schema.Builders;
import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
import org.opendaylight.yangtools.yang.data.impl.schema.builder.api.NormalizedNodeAttrBuilder;
import org.opendaylight.yangtools.yang.model.api.ContainerSchemaNode;
import org.opendaylight.yangtools.yang.model.api.NotificationDefinition;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
import org.opendaylight.yangtools.yang.model.api.SchemaPath;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
public class NetconfMessageTransformUtil {
private static final Logger LOG= LoggerFactory.getLogger(NetconfMessageTransformUtil.class);
public static final String MESSAGE_ID_ATTR = "message-id";
public static final XMLOutputFactory XML_FACTORY;
static {
XML_FACTORY = XMLOutputFactory.newFactory();
XML_FACTORY.setProperty(XMLOutputFactory.IS_REPAIRING_NAMESPACES, false);
}
public static final QName CREATE_SUBSCRIPTION_RPC_QNAME = QName.cachedReference(QName.create(CreateSubscriptionInput.QNAME, "create-subscription"));
private static final String SUBTREE = "subtree";
// Blank document used for creation of new DOM nodes
private static final Document BLANK_DOCUMENT = XmlUtil.newDocument();
public static final String EVENT_TIME = "eventTime";
private NetconfMessageTransformUtil() {}
public static final QName IETF_NETCONF_MONITORING = QName.create(NetconfState.QNAME, "ietf-netconf-monitoring");
public static final QName GET_DATA_QNAME = QName.create(IETF_NETCONF_MONITORING, "data");
public static final QName GET_SCHEMA_QNAME = QName.create(IETF_NETCONF_MONITORING, "get-schema");
public static final QName IETF_NETCONF_MONITORING_SCHEMA_FORMAT = QName.create(IETF_NETCONF_MONITORING, "format");
public static final QName IETF_NETCONF_MONITORING_SCHEMA_LOCATION = QName.create(IETF_NETCONF_MONITORING, "location");
public static final QName IETF_NETCONF_MONITORING_SCHEMA_IDENTIFIER = QName.create(IETF_NETCONF_MONITORING, "identifier");
public static final QName IETF_NETCONF_MONITORING_SCHEMA_VERSION = QName.create(IETF_NETCONF_MONITORING, "version");
public static final QName IETF_NETCONF_MONITORING_SCHEMA_NAMESPACE = QName.create(IETF_NETCONF_MONITORING, "namespace");
public static final QName IETF_NETCONF_NOTIFICATIONS = QName.create(NetconfCapabilityChange.QNAME, "ietf-netconf-notifications");
public static final QName NETCONF_QNAME = QName.cachedReference(QName.create("urn:ietf:params:xml:ns:netconf:base:1.0", "2011-06-01", "netconf"));
public static final URI NETCONF_URI = NETCONF_QNAME.getNamespace();
public static final QName NETCONF_DATA_QNAME = QName.create(NETCONF_QNAME, "data");
public static final QName NETCONF_RPC_REPLY_QNAME = QName.create(NETCONF_QNAME, "rpc-reply");
public static final QName NETCONF_OK_QNAME = QName.create(NETCONF_QNAME, "ok");
public static final QName NETCONF_ERROR_OPTION_QNAME = QName.create(NETCONF_QNAME, "error-option");
public static final QName NETCONF_RUNNING_QNAME = QName.create(NETCONF_QNAME, "running");
public static final QName NETCONF_SOURCE_QNAME = QName.create(NETCONF_QNAME, "source");
public static final QName NETCONF_CANDIDATE_QNAME = QName.create(NETCONF_QNAME, "candidate");
public static final QName NETCONF_TARGET_QNAME = QName.create(NETCONF_QNAME, "target");
public static final QName NETCONF_CONFIG_QNAME = QName.create(NETCONF_QNAME, "config");
public static final QName NETCONF_COMMIT_QNAME = QName.create(NETCONF_QNAME, "commit");
public static final QName NETCONF_VALIDATE_QNAME = QName.create(NETCONF_QNAME, "validate");
public static final QName NETCONF_COPY_CONFIG_QNAME = QName.create(NETCONF_QNAME, "copy-config");
public static final QName NETCONF_OPERATION_QNAME = QName.create(NETCONF_QNAME, "operation");
public static final QName NETCONF_DEFAULT_OPERATION_QNAME = QName.create(NETCONF_OPERATION_QNAME, "default-operation");
public static final QName NETCONF_EDIT_CONFIG_QNAME = QName.create(NETCONF_QNAME, "edit-config");
public static final QName NETCONF_GET_CONFIG_QNAME = QName.create(NETCONF_QNAME, "get-config");
public static final QName NETCONF_DISCARD_CHANGES_QNAME = QName.create(NETCONF_QNAME, "discard-changes");
public static final QName NETCONF_TYPE_QNAME = QName.create(NETCONF_QNAME, "type");
public static final QName NETCONF_FILTER_QNAME = QName.create(NETCONF_QNAME, "filter");
public static final QName NETCONF_GET_QNAME = QName.create(NETCONF_QNAME, "get");
public static final QName NETCONF_RPC_QNAME = QName.create(NETCONF_QNAME, "rpc");
public static final URI NETCONF_ROLLBACK_ON_ERROR_URI = URI
.create("urn:ietf:params:netconf:capability:rollback-on-error:1.0");
public static final String ROLLBACK_ON_ERROR_OPTION = "rollback-on-error";
public static final URI NETCONF_CANDIDATE_URI = URI
.create("urn:ietf:params:netconf:capability:candidate:1.0");
public static final URI NETCONF_NOTIFICATONS_URI = URI
.create("urn:ietf:params:netconf:capability:notification:1.0");
public static final URI NETCONF_RUNNING_WRITABLE_URI = URI
.create("urn:ietf:params:netconf:capability:writable-running:1.0");
public static final QName NETCONF_LOCK_QNAME = QName.create(NETCONF_QNAME, "lock");
public static final QName NETCONF_UNLOCK_QNAME = QName.create(NETCONF_QNAME, "unlock");
// Discard changes message
public static final ContainerNode DISCARD_CHANGES_RPC_CONTENT =
Builders.containerBuilder().withNodeIdentifier(new YangInstanceIdentifier.NodeIdentifier(NETCONF_DISCARD_CHANGES_QNAME)).build();
// Commit changes message
public static final ContainerNode COMMIT_RPC_CONTENT =
Builders.containerBuilder().withNodeIdentifier(new YangInstanceIdentifier.NodeIdentifier(NETCONF_COMMIT_QNAME)).build();
// Get message
public static final ContainerNode GET_RPC_CONTENT =
Builders.containerBuilder().withNodeIdentifier(new YangInstanceIdentifier.NodeIdentifier(NETCONF_GET_QNAME)).build();
// Create-subscription changes message
public static final ContainerNode CREATE_SUBSCRIPTION_RPC_CONTENT =
Builders.containerBuilder().withNodeIdentifier(new YangInstanceIdentifier.NodeIdentifier(CREATE_SUBSCRIPTION_RPC_QNAME)).build();
public static final DataContainerChild<?, ?> EMPTY_FILTER;
static {
final NormalizedNodeAttrBuilder<YangInstanceIdentifier.NodeIdentifier, DOMSource, AnyXmlNode> anyXmlBuilder = Builders.anyXmlBuilder().withNodeIdentifier(toId(NETCONF_FILTER_QNAME));
anyXmlBuilder.withAttributes(Collections.singletonMap(NETCONF_TYPE_QNAME, SUBTREE));
final Element element = XmlUtil.createElement(BLANK_DOCUMENT, NETCONF_FILTER_QNAME.getLocalName(), Optional.of(NETCONF_FILTER_QNAME.getNamespace().toString()));
element.setAttributeNS(NETCONF_FILTER_QNAME.getNamespace().toString(), NETCONF_TYPE_QNAME.getLocalName(), "subtree");
anyXmlBuilder.withValue(new DOMSource(element));
EMPTY_FILTER = anyXmlBuilder.build();
}
public static DataContainerChild<?, ?> toFilterStructure(final YangInstanceIdentifier identifier, final SchemaContext ctx) {
final NormalizedNodeAttrBuilder<YangInstanceIdentifier.NodeIdentifier, DOMSource, AnyXmlNode> anyXmlBuilder = Builders.anyXmlBuilder().withNodeIdentifier(toId(NETCONF_FILTER_QNAME));
anyXmlBuilder.withAttributes(Collections.singletonMap(NETCONF_TYPE_QNAME, SUBTREE));
final NormalizedNode<?, ?> filterContent = ImmutableNodes.fromInstanceId(ctx, identifier);
final Element element = XmlUtil.createElement(BLANK_DOCUMENT, NETCONF_FILTER_QNAME.getLocalName(), Optional.of(NETCONF_FILTER_QNAME.getNamespace().toString()));
element.setAttributeNS(NETCONF_FILTER_QNAME.getNamespace().toString(), NETCONF_TYPE_QNAME.getLocalName(), "subtree");
try {
writeNormalizedNode(filterContent, new DOMResult(element), SchemaPath.ROOT, ctx);
} catch (IOException | XMLStreamException e) {
throw new IllegalStateException("Unable to serialize filter element for path " + identifier, e);
}
anyXmlBuilder.withValue(new DOMSource(element));
return anyXmlBuilder.build();
}
public static void checkValidReply(final NetconfMessage input, final NetconfMessage output)
throws NetconfDocumentedException {
final String inputMsgId = input.getDocument().getDocumentElement().getAttribute(MESSAGE_ID_ATTR);
final String outputMsgId = output.getDocument().getDocumentElement().getAttribute(MESSAGE_ID_ATTR);
if(inputMsgId.equals(outputMsgId) == false) {
final Map<String,String> errorInfo = ImmutableMap.<String,String>builder()
.put( "actual-message-id", outputMsgId )
.put( "expected-message-id", inputMsgId )
.build();
throw new NetconfDocumentedException( "Response message contained unknown \"message-id\"",
null, NetconfDocumentedException.ErrorType.protocol,
NetconfDocumentedException.ErrorTag.bad_attribute,
NetconfDocumentedException.ErrorSeverity.error, errorInfo );
}
}
public static void checkSuccessReply(final NetconfMessage output) throws NetconfDocumentedException {
if(NetconfMessageUtil.isErrorMessage(output)) {
throw NetconfDocumentedException.fromXMLDocument(output.getDocument());
}
}
public static RpcError toRpcError( final NetconfDocumentedException ex ) {
final StringBuilder infoBuilder = new StringBuilder();
final Map<String, String> errorInfo = ex.getErrorInfo();
if(errorInfo != null) {
for( final Entry<String,String> e: errorInfo.entrySet() ) {
infoBuilder.append( '<' ).append( e.getKey() ).append( '>' ).append( e.getValue() )
.append( "</" ).append( e.getKey() ).append( '>' );
}
}
final ErrorSeverity severity = toRpcErrorSeverity( ex.getErrorSeverity() );
return severity == ErrorSeverity.ERROR ?
RpcResultBuilder.newError(
toRpcErrorType( ex.getErrorType() ), ex.getErrorTag().getTagValue(),
ex.getLocalizedMessage(), null, infoBuilder.toString(), ex.getCause() ) :
RpcResultBuilder.newWarning(
toRpcErrorType( ex.getErrorType() ), ex.getErrorTag().getTagValue(),
ex.getLocalizedMessage(), null, infoBuilder.toString(), ex.getCause() );
}
private static ErrorSeverity toRpcErrorSeverity( final NetconfDocumentedException.ErrorSeverity severity ) {
switch (severity) {
case warning:
return RpcError.ErrorSeverity.WARNING;
default:
return RpcError.ErrorSeverity.ERROR;
}
}
private static RpcError.ErrorType toRpcErrorType(final NetconfDocumentedException.ErrorType type) {
switch (type) {
case protocol:
return RpcError.ErrorType.PROTOCOL;
case rpc:
return RpcError.ErrorType.RPC;
case transport:
return RpcError.ErrorType.TRANSPORT;
default:
return RpcError.ErrorType.APPLICATION;
}
}
public static YangInstanceIdentifier.NodeIdentifier toId(final YangInstanceIdentifier.PathArgument qname) {
return toId(qname.getNodeType());
}
public static YangInstanceIdentifier.NodeIdentifier toId(final QName nodeType) {
return new YangInstanceIdentifier.NodeIdentifier(nodeType);
}
public static Element getDataSubtree(final Document doc) {
return (Element) doc.getElementsByTagNameNS(NETCONF_URI.toString(), "data").item(0);
}
public static boolean isDataRetrievalOperation(final QName rpc) {
return NETCONF_URI.equals(rpc.getNamespace())
&& (NETCONF_GET_CONFIG_QNAME.getLocalName().equals(rpc.getLocalName())
|| NETCONF_GET_QNAME.getLocalName().equals(rpc.getLocalName()));
}
public static ContainerSchemaNode createSchemaForDataRead(final SchemaContext schemaContext) {
return new NodeContainerProxy(NETCONF_DATA_QNAME, schemaContext.getChildNodes());
}
public static ContainerSchemaNode createSchemaForNotification(final NotificationDefinition next) {
return new NodeContainerProxy(next.getQName(), next.getChildNodes(), next.getAvailableAugmentations());
}
public static ContainerNode wrap(final QName name, final DataContainerChild<?, ?>... node) {
return Builders.containerBuilder().withNodeIdentifier(toId(name)).withValue(ImmutableList.copyOf(node)).build();
}
public static DataContainerChild<?, ?> createEditConfigStructure(final SchemaContext ctx, final YangInstanceIdentifier dataPath,
final Optional<ModifyAction> operation, final Optional<NormalizedNode<?, ?>> lastChildOverride) {
final NormalizedNode<?, ?> configContent;
if (dataPath.isEmpty()) {
Preconditions.checkArgument(lastChildOverride.isPresent(), "Data has to be present when creating structure for top level element");
Preconditions.checkArgument(lastChildOverride.get() instanceof DataContainerChild<?, ?>,
"Data has to be either container or a list node when creating structure for top level element, but was: %s", lastChildOverride.get());
configContent = lastChildOverride.get();
} else {
final Entry<QName, ModifyAction> modifyOperation =
operation.isPresent() ? new AbstractMap.SimpleEntry<>(NETCONF_OPERATION_QNAME, operation.get()) : null;
configContent = ImmutableNodes.fromInstanceId(ctx, dataPath, lastChildOverride, Optional.fromNullable(modifyOperation));
}
final Element element = XmlUtil.createElement(BLANK_DOCUMENT, NETCONF_CONFIG_QNAME.getLocalName(), Optional.of(NETCONF_CONFIG_QNAME.getNamespace().toString()));
try {
writeNormalizedNode(configContent, new DOMResult(element), SchemaPath.ROOT, ctx);
} catch (IOException | XMLStreamException e) {
throw new IllegalStateException("Unable to serialize edit config content element for path " + dataPath, e);
}
final DOMSource value = new DOMSource(element);
return Builders.choiceBuilder().withNodeIdentifier(toId(EditContent.QNAME)).withChild(
Builders.anyXmlBuilder().withNodeIdentifier(toId(NETCONF_CONFIG_QNAME)).withValue(value).build()).build();
}
public static SchemaPath toPath(final QName rpc) {
return SchemaPath.create(true, rpc);
}
// FIXME similar code is in netconf-notifications-impl , DRY
public static void writeNormalizedNode(final NormalizedNode<?, ?> normalized, final DOMResult result, final SchemaPath schemaPath, final SchemaContext context)
throws IOException, XMLStreamException {
NormalizedNodeWriter normalizedNodeWriter = null;
NormalizedNodeStreamWriter normalizedNodeStreamWriter = null;
XMLStreamWriter writer = null;
try {
writer = XML_FACTORY.createXMLStreamWriter(result);
normalizedNodeStreamWriter = XMLStreamNormalizedNodeStreamWriter.create(writer, context, schemaPath);
normalizedNodeWriter = NormalizedNodeWriter.forStreamWriter(normalizedNodeStreamWriter);
normalizedNodeWriter.write(normalized);
normalizedNodeWriter.flush();
} finally {
try {
if(normalizedNodeWriter != null) {
normalizedNodeWriter.close();
}
if(normalizedNodeStreamWriter != null) {
normalizedNodeStreamWriter.close();
}
if(writer != null) {
writer.close();
}
} catch (final Exception e) {
LOG.warn("Unable to close resource properly", e);
}
}
}
}
| opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/util/NetconfMessageTransformUtil.java | /*
* Copyright (c) 2014 Cisco Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.controller.sal.connect.netconf.util;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.net.URI;
import java.util.AbstractMap;
import java.util.Collections;
import java.util.Map;
import java.util.Map.Entry;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import javax.xml.transform.dom.DOMResult;
import javax.xml.transform.dom.DOMSource;
import org.opendaylight.controller.netconf.api.NetconfDocumentedException;
import org.opendaylight.controller.netconf.api.NetconfMessage;
import org.opendaylight.controller.netconf.util.messages.NetconfMessageUtil;
import org.opendaylight.controller.netconf.util.xml.XmlUtil;
import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.netconf.base._1._0.rev110601.edit.config.input.EditContent;
import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.netconf.notification._1._0.rev080714.CreateSubscriptionInput;
import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.netconf.monitoring.rev101004.NetconfState;
import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.netconf.notifications.rev120206.NetconfCapabilityChange;
import org.opendaylight.yangtools.yang.common.QName;
import org.opendaylight.yangtools.yang.common.RpcError;
import org.opendaylight.yangtools.yang.common.RpcError.ErrorSeverity;
import org.opendaylight.yangtools.yang.common.RpcResultBuilder;
import org.opendaylight.yangtools.yang.data.api.ModifyAction;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.AnyXmlNode;
import org.opendaylight.yangtools.yang.data.api.schema.ContainerNode;
import org.opendaylight.yangtools.yang.data.api.schema.DataContainerChild;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeStreamWriter;
import org.opendaylight.yangtools.yang.data.api.schema.stream.NormalizedNodeWriter;
import org.opendaylight.yangtools.yang.data.impl.codec.xml.XMLStreamNormalizedNodeStreamWriter;
import org.opendaylight.yangtools.yang.data.impl.schema.Builders;
import org.opendaylight.yangtools.yang.data.impl.schema.ImmutableNodes;
import org.opendaylight.yangtools.yang.data.impl.schema.builder.api.NormalizedNodeAttrBuilder;
import org.opendaylight.yangtools.yang.model.api.ContainerSchemaNode;
import org.opendaylight.yangtools.yang.model.api.NotificationDefinition;
import org.opendaylight.yangtools.yang.model.api.SchemaContext;
import org.opendaylight.yangtools.yang.model.api.SchemaPath;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
public class NetconfMessageTransformUtil {
private static final Logger LOG= LoggerFactory.getLogger(NetconfMessageTransformUtil.class);
public static final String MESSAGE_ID_ATTR = "message-id";
public static final XMLOutputFactory XML_FACTORY;
static {
XML_FACTORY = XMLOutputFactory.newFactory();
XML_FACTORY.setProperty(XMLOutputFactory.IS_REPAIRING_NAMESPACES, false);
}
public static final QName CREATE_SUBSCRIPTION_RPC_QNAME = QName.cachedReference(QName.create(CreateSubscriptionInput.QNAME, "create-subscription"));
private static final String SUBTREE = "subtree";
// Blank document used for creation of new DOM nodes
private static final Document BLANK_DOCUMENT = XmlUtil.newDocument();
public static final String EVENT_TIME = "eventTime";
private NetconfMessageTransformUtil() {}
public static final QName IETF_NETCONF_MONITORING = QName.create(NetconfState.QNAME, "ietf-netconf-monitoring");
public static final QName GET_DATA_QNAME = QName.create(IETF_NETCONF_MONITORING, "data");
public static final QName GET_SCHEMA_QNAME = QName.create(IETF_NETCONF_MONITORING, "get-schema");
public static final QName IETF_NETCONF_MONITORING_SCHEMA_FORMAT = QName.create(IETF_NETCONF_MONITORING, "format");
public static final QName IETF_NETCONF_MONITORING_SCHEMA_LOCATION = QName.create(IETF_NETCONF_MONITORING, "location");
public static final QName IETF_NETCONF_MONITORING_SCHEMA_IDENTIFIER = QName.create(IETF_NETCONF_MONITORING, "identifier");
public static final QName IETF_NETCONF_MONITORING_SCHEMA_VERSION = QName.create(IETF_NETCONF_MONITORING, "version");
public static final QName IETF_NETCONF_MONITORING_SCHEMA_NAMESPACE = QName.create(IETF_NETCONF_MONITORING, "namespace");
public static final QName IETF_NETCONF_NOTIFICATIONS = QName.create(NetconfCapabilityChange.QNAME, "ietf-netconf-notifications");
public static URI NETCONF_URI = URI.create("urn:ietf:params:xml:ns:netconf:base:1.0");
public static QName NETCONF_QNAME = QName.create(NETCONF_URI.toString(), "2011-06-01", "netconf");
public static QName NETCONF_DATA_QNAME = QName.create(NETCONF_QNAME, "data");
public static QName NETCONF_RPC_REPLY_QNAME = QName.create(NETCONF_QNAME, "rpc-reply");
public static QName NETCONF_OK_QNAME = QName.create(NETCONF_QNAME, "ok");
public static QName NETCONF_ERROR_OPTION_QNAME = QName.create(NETCONF_QNAME, "error-option");
public static QName NETCONF_RUNNING_QNAME = QName.create(NETCONF_QNAME, "running");
public static QName NETCONF_SOURCE_QNAME = QName.create(NETCONF_QNAME, "source");
public static QName NETCONF_CANDIDATE_QNAME = QName.create(NETCONF_QNAME, "candidate");
public static QName NETCONF_TARGET_QNAME = QName.create(NETCONF_QNAME, "target");
public static QName NETCONF_CONFIG_QNAME = QName.create(NETCONF_QNAME, "config");
public static QName NETCONF_COMMIT_QNAME = QName.create(NETCONF_QNAME, "commit");
public static QName NETCONF_VALIDATE_QNAME = QName.create(NETCONF_QNAME, "validate");
public static QName NETCONF_COPY_CONFIG_QNAME = QName.create(NETCONF_QNAME, "copy-config");
public static QName NETCONF_OPERATION_QNAME = QName.create(NETCONF_QNAME, "operation");
public static QName NETCONF_DEFAULT_OPERATION_QNAME = QName.create(NETCONF_OPERATION_QNAME, "default-operation");
public static QName NETCONF_EDIT_CONFIG_QNAME = QName.create(NETCONF_QNAME, "edit-config");
public static QName NETCONF_GET_CONFIG_QNAME = QName.create(NETCONF_QNAME, "get-config");
public static QName NETCONF_DISCARD_CHANGES_QNAME = QName.create(NETCONF_QNAME, "discard-changes");
public static QName NETCONF_TYPE_QNAME = QName.create(NETCONF_QNAME, "type");
public static QName NETCONF_FILTER_QNAME = QName.create(NETCONF_QNAME, "filter");
public static QName NETCONF_GET_QNAME = QName.create(NETCONF_QNAME, "get");
public static QName NETCONF_RPC_QNAME = QName.create(NETCONF_QNAME, "rpc");
public static URI NETCONF_ROLLBACK_ON_ERROR_URI = URI
.create("urn:ietf:params:netconf:capability:rollback-on-error:1.0");
public static String ROLLBACK_ON_ERROR_OPTION = "rollback-on-error";
public static URI NETCONF_CANDIDATE_URI = URI
.create("urn:ietf:params:netconf:capability:candidate:1.0");
public static URI NETCONF_NOTIFICATONS_URI = URI
.create("urn:ietf:params:netconf:capability:notification:1.0");
public static URI NETCONF_RUNNING_WRITABLE_URI = URI
.create("urn:ietf:params:netconf:capability:writable-running:1.0");
public static QName NETCONF_LOCK_QNAME = QName.create(NETCONF_QNAME, "lock");
public static QName NETCONF_UNLOCK_QNAME = QName.create(NETCONF_QNAME, "unlock");
// Discard changes message
public static final ContainerNode DISCARD_CHANGES_RPC_CONTENT =
Builders.containerBuilder().withNodeIdentifier(new YangInstanceIdentifier.NodeIdentifier(NETCONF_DISCARD_CHANGES_QNAME)).build();
// Commit changes message
public static final ContainerNode COMMIT_RPC_CONTENT =
Builders.containerBuilder().withNodeIdentifier(new YangInstanceIdentifier.NodeIdentifier(NETCONF_COMMIT_QNAME)).build();
// Get message
public static final ContainerNode GET_RPC_CONTENT =
Builders.containerBuilder().withNodeIdentifier(new YangInstanceIdentifier.NodeIdentifier(NETCONF_GET_QNAME)).build();
// Create-subscription changes message
public static final ContainerNode CREATE_SUBSCRIPTION_RPC_CONTENT =
Builders.containerBuilder().withNodeIdentifier(new YangInstanceIdentifier.NodeIdentifier(CREATE_SUBSCRIPTION_RPC_QNAME)).build();
public static final DataContainerChild<?, ?> EMPTY_FILTER;
static {
final NormalizedNodeAttrBuilder<YangInstanceIdentifier.NodeIdentifier, DOMSource, AnyXmlNode> anyXmlBuilder = Builders.anyXmlBuilder().withNodeIdentifier(toId(NETCONF_FILTER_QNAME));
anyXmlBuilder.withAttributes(Collections.singletonMap(NETCONF_TYPE_QNAME, SUBTREE));
final Element element = XmlUtil.createElement(BLANK_DOCUMENT, NETCONF_FILTER_QNAME.getLocalName(), Optional.of(NETCONF_FILTER_QNAME.getNamespace().toString()));
element.setAttributeNS(NETCONF_FILTER_QNAME.getNamespace().toString(), NETCONF_TYPE_QNAME.getLocalName(), "subtree");
anyXmlBuilder.withValue(new DOMSource(element));
EMPTY_FILTER = anyXmlBuilder.build();
}
public static DataContainerChild<?, ?> toFilterStructure(final YangInstanceIdentifier identifier, final SchemaContext ctx) {
final NormalizedNodeAttrBuilder<YangInstanceIdentifier.NodeIdentifier, DOMSource, AnyXmlNode> anyXmlBuilder = Builders.anyXmlBuilder().withNodeIdentifier(toId(NETCONF_FILTER_QNAME));
anyXmlBuilder.withAttributes(Collections.singletonMap(NETCONF_TYPE_QNAME, SUBTREE));
final NormalizedNode<?, ?> filterContent = ImmutableNodes.fromInstanceId(ctx, identifier);
final Element element = XmlUtil.createElement(BLANK_DOCUMENT, NETCONF_FILTER_QNAME.getLocalName(), Optional.of(NETCONF_FILTER_QNAME.getNamespace().toString()));
element.setAttributeNS(NETCONF_FILTER_QNAME.getNamespace().toString(), NETCONF_TYPE_QNAME.getLocalName(), "subtree");
try {
writeNormalizedNode(filterContent, new DOMResult(element), SchemaPath.ROOT, ctx);
} catch (IOException | XMLStreamException e) {
throw new IllegalStateException("Unable to serialize filter element for path " + identifier, e);
}
anyXmlBuilder.withValue(new DOMSource(element));
return anyXmlBuilder.build();
}
public static void checkValidReply(final NetconfMessage input, final NetconfMessage output)
throws NetconfDocumentedException {
final String inputMsgId = input.getDocument().getDocumentElement().getAttribute(MESSAGE_ID_ATTR);
final String outputMsgId = output.getDocument().getDocumentElement().getAttribute(MESSAGE_ID_ATTR);
if(inputMsgId.equals(outputMsgId) == false) {
final Map<String,String> errorInfo = ImmutableMap.<String,String>builder()
.put( "actual-message-id", outputMsgId )
.put( "expected-message-id", inputMsgId )
.build();
throw new NetconfDocumentedException( "Response message contained unknown \"message-id\"",
null, NetconfDocumentedException.ErrorType.protocol,
NetconfDocumentedException.ErrorTag.bad_attribute,
NetconfDocumentedException.ErrorSeverity.error, errorInfo );
}
}
public static void checkSuccessReply(final NetconfMessage output) throws NetconfDocumentedException {
if(NetconfMessageUtil.isErrorMessage(output)) {
throw NetconfDocumentedException.fromXMLDocument(output.getDocument());
}
}
public static RpcError toRpcError( final NetconfDocumentedException ex ) {
final StringBuilder infoBuilder = new StringBuilder();
final Map<String, String> errorInfo = ex.getErrorInfo();
if(errorInfo != null) {
for( final Entry<String,String> e: errorInfo.entrySet() ) {
infoBuilder.append( '<' ).append( e.getKey() ).append( '>' ).append( e.getValue() )
.append( "</" ).append( e.getKey() ).append( '>' );
}
}
final ErrorSeverity severity = toRpcErrorSeverity( ex.getErrorSeverity() );
return severity == ErrorSeverity.ERROR ?
RpcResultBuilder.newError(
toRpcErrorType( ex.getErrorType() ), ex.getErrorTag().getTagValue(),
ex.getLocalizedMessage(), null, infoBuilder.toString(), ex.getCause() ) :
RpcResultBuilder.newWarning(
toRpcErrorType( ex.getErrorType() ), ex.getErrorTag().getTagValue(),
ex.getLocalizedMessage(), null, infoBuilder.toString(), ex.getCause() );
}
private static ErrorSeverity toRpcErrorSeverity( final NetconfDocumentedException.ErrorSeverity severity ) {
switch( severity ) {
case warning:
return RpcError.ErrorSeverity.WARNING;
default:
return RpcError.ErrorSeverity.ERROR;
}
}
private static RpcError.ErrorType toRpcErrorType(final NetconfDocumentedException.ErrorType type) {
switch( type ) {
case protocol:
return RpcError.ErrorType.PROTOCOL;
case rpc:
return RpcError.ErrorType.RPC;
case transport:
return RpcError.ErrorType.TRANSPORT;
default:
return RpcError.ErrorType.APPLICATION;
}
}
public static YangInstanceIdentifier.NodeIdentifier toId(final YangInstanceIdentifier.PathArgument qname) {
return toId(qname.getNodeType());
}
public static YangInstanceIdentifier.NodeIdentifier toId(final QName nodeType) {
return new YangInstanceIdentifier.NodeIdentifier(nodeType);
}
public static Element getDataSubtree(final Document doc) {
return (Element) doc.getElementsByTagNameNS(NETCONF_URI.toString(), "data").item(0);
}
public static boolean isDataRetrievalOperation(final QName rpc) {
return NETCONF_URI.equals(rpc.getNamespace())
&& (rpc.getLocalName().equals(NETCONF_GET_CONFIG_QNAME.getLocalName()) || rpc.getLocalName().equals(
NETCONF_GET_QNAME.getLocalName()));
}
public static ContainerSchemaNode createSchemaForDataRead(final SchemaContext schemaContext) {
final QName config = QName.create(NETCONF_EDIT_CONFIG_QNAME, "data");
return new NodeContainerProxy(config, schemaContext.getChildNodes());
}
public static ContainerSchemaNode createSchemaForNotification(final NotificationDefinition next) {
return new NodeContainerProxy(next.getQName(), next.getChildNodes(), next.getAvailableAugmentations());
}
public static ContainerNode wrap(final QName name, final DataContainerChild<?, ?>... node) {
return Builders.containerBuilder().withNodeIdentifier(toId(name)).withValue(Lists.newArrayList(node)).build();
}
public static DataContainerChild<?, ?> createEditConfigStructure(final SchemaContext ctx, final YangInstanceIdentifier dataPath,
final Optional<ModifyAction> operation, final Optional<NormalizedNode<?, ?>> lastChildOverride) {
final NormalizedNode<?, ?> configContent;
if (dataPath.isEmpty()) {
Preconditions.checkArgument(lastChildOverride.isPresent(), "Data has to be present when creating structure for top level element");
Preconditions.checkArgument(lastChildOverride.get() instanceof DataContainerChild<?, ?>,
"Data has to be either container or a list node when creating structure for top level element, but was: %s", lastChildOverride.get());
configContent = lastChildOverride.get();
} else {
final Entry<QName, ModifyAction> modifyOperation =
operation.isPresent() ? new AbstractMap.SimpleEntry<>(NETCONF_OPERATION_QNAME, operation.get()) : null;
configContent = ImmutableNodes.fromInstanceId(ctx, dataPath, lastChildOverride, Optional.fromNullable(modifyOperation));
}
final Element element = XmlUtil.createElement(BLANK_DOCUMENT, NETCONF_CONFIG_QNAME.getLocalName(), Optional.of(NETCONF_CONFIG_QNAME.getNamespace().toString()));
try {
writeNormalizedNode(configContent, new DOMResult(element), SchemaPath.ROOT, ctx);
} catch (IOException | XMLStreamException e) {
throw new IllegalStateException("Unable to serialize edit config content element for path " + dataPath, e);
}
final DOMSource value = new DOMSource(element);
return Builders.choiceBuilder().withNodeIdentifier(toId(EditContent.QNAME)).withChild(
Builders.anyXmlBuilder().withNodeIdentifier(toId(NETCONF_CONFIG_QNAME)).withValue(value).build()).build();
}
public static SchemaPath toPath(final QName rpc) {
return SchemaPath.create(true, rpc);
}
// FIXME similar code is in netconf-notifications-impl , DRY
public static void writeNormalizedNode(final NormalizedNode<?, ?> normalized, final DOMResult result, final SchemaPath schemaPath, final SchemaContext context)
throws IOException, XMLStreamException {
NormalizedNodeWriter normalizedNodeWriter = null;
NormalizedNodeStreamWriter normalizedNodeStreamWriter = null;
XMLStreamWriter writer = null;
try {
writer = XML_FACTORY.createXMLStreamWriter(result);
normalizedNodeStreamWriter = XMLStreamNormalizedNodeStreamWriter.create(writer, context, schemaPath);
normalizedNodeWriter = NormalizedNodeWriter.forStreamWriter(normalizedNodeStreamWriter);
normalizedNodeWriter.write(normalized);
normalizedNodeWriter.flush();
} finally {
try {
if(normalizedNodeWriter != null) {
normalizedNodeWriter.close();
}
if(normalizedNodeStreamWriter != null) {
normalizedNodeStreamWriter.close();
}
if(writer != null) {
writer.close();
}
} catch (final Exception e) {
LOG.warn("Unable to close resource properly", e);
}
}
}
}
| Make constants really constant
This triggers a major sonar warning, and is insecure as these fields could
get modified by anyone, throwing the system out of whack.
Also optimize comparison and make sure the netconf QNameModule is cached.
Also prevent unneeded QName instantiation.
Also use an immutable list to hold arguments, preventing a copy in
builders.
Change-Id: I74647c444ec273066fc4727618b35c92386ba77c
Signed-off-by: Robert Varga <[email protected]>
(cherry picked from commit fc48504605170d4d37346738602de388daa36f25)
| opendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/util/NetconfMessageTransformUtil.java | Make constants really constant | <ide><path>pendaylight/md-sal/sal-netconf-connector/src/main/java/org/opendaylight/controller/sal/connect/netconf/util/NetconfMessageTransformUtil.java
<ide>
<ide> import com.google.common.base.Optional;
<ide> import com.google.common.base.Preconditions;
<add>import com.google.common.collect.ImmutableList;
<ide> import com.google.common.collect.ImmutableMap;
<del>import com.google.common.collect.Lists;
<ide> import java.io.IOException;
<ide> import java.net.URI;
<ide> import java.util.AbstractMap;
<ide>
<ide> public static final QName IETF_NETCONF_NOTIFICATIONS = QName.create(NetconfCapabilityChange.QNAME, "ietf-netconf-notifications");
<ide>
<del> public static URI NETCONF_URI = URI.create("urn:ietf:params:xml:ns:netconf:base:1.0");
<del> public static QName NETCONF_QNAME = QName.create(NETCONF_URI.toString(), "2011-06-01", "netconf");
<del> public static QName NETCONF_DATA_QNAME = QName.create(NETCONF_QNAME, "data");
<del> public static QName NETCONF_RPC_REPLY_QNAME = QName.create(NETCONF_QNAME, "rpc-reply");
<del> public static QName NETCONF_OK_QNAME = QName.create(NETCONF_QNAME, "ok");
<del> public static QName NETCONF_ERROR_OPTION_QNAME = QName.create(NETCONF_QNAME, "error-option");
<del> public static QName NETCONF_RUNNING_QNAME = QName.create(NETCONF_QNAME, "running");
<del> public static QName NETCONF_SOURCE_QNAME = QName.create(NETCONF_QNAME, "source");
<del> public static QName NETCONF_CANDIDATE_QNAME = QName.create(NETCONF_QNAME, "candidate");
<del> public static QName NETCONF_TARGET_QNAME = QName.create(NETCONF_QNAME, "target");
<del> public static QName NETCONF_CONFIG_QNAME = QName.create(NETCONF_QNAME, "config");
<del> public static QName NETCONF_COMMIT_QNAME = QName.create(NETCONF_QNAME, "commit");
<del> public static QName NETCONF_VALIDATE_QNAME = QName.create(NETCONF_QNAME, "validate");
<del> public static QName NETCONF_COPY_CONFIG_QNAME = QName.create(NETCONF_QNAME, "copy-config");
<del> public static QName NETCONF_OPERATION_QNAME = QName.create(NETCONF_QNAME, "operation");
<del> public static QName NETCONF_DEFAULT_OPERATION_QNAME = QName.create(NETCONF_OPERATION_QNAME, "default-operation");
<del> public static QName NETCONF_EDIT_CONFIG_QNAME = QName.create(NETCONF_QNAME, "edit-config");
<del> public static QName NETCONF_GET_CONFIG_QNAME = QName.create(NETCONF_QNAME, "get-config");
<del> public static QName NETCONF_DISCARD_CHANGES_QNAME = QName.create(NETCONF_QNAME, "discard-changes");
<del> public static QName NETCONF_TYPE_QNAME = QName.create(NETCONF_QNAME, "type");
<del> public static QName NETCONF_FILTER_QNAME = QName.create(NETCONF_QNAME, "filter");
<del> public static QName NETCONF_GET_QNAME = QName.create(NETCONF_QNAME, "get");
<del> public static QName NETCONF_RPC_QNAME = QName.create(NETCONF_QNAME, "rpc");
<del>
<del> public static URI NETCONF_ROLLBACK_ON_ERROR_URI = URI
<add> public static final QName NETCONF_QNAME = QName.cachedReference(QName.create("urn:ietf:params:xml:ns:netconf:base:1.0", "2011-06-01", "netconf"));
<add> public static final URI NETCONF_URI = NETCONF_QNAME.getNamespace();
<add>
<add> public static final QName NETCONF_DATA_QNAME = QName.create(NETCONF_QNAME, "data");
<add> public static final QName NETCONF_RPC_REPLY_QNAME = QName.create(NETCONF_QNAME, "rpc-reply");
<add> public static final QName NETCONF_OK_QNAME = QName.create(NETCONF_QNAME, "ok");
<add> public static final QName NETCONF_ERROR_OPTION_QNAME = QName.create(NETCONF_QNAME, "error-option");
<add> public static final QName NETCONF_RUNNING_QNAME = QName.create(NETCONF_QNAME, "running");
<add> public static final QName NETCONF_SOURCE_QNAME = QName.create(NETCONF_QNAME, "source");
<add> public static final QName NETCONF_CANDIDATE_QNAME = QName.create(NETCONF_QNAME, "candidate");
<add> public static final QName NETCONF_TARGET_QNAME = QName.create(NETCONF_QNAME, "target");
<add> public static final QName NETCONF_CONFIG_QNAME = QName.create(NETCONF_QNAME, "config");
<add> public static final QName NETCONF_COMMIT_QNAME = QName.create(NETCONF_QNAME, "commit");
<add> public static final QName NETCONF_VALIDATE_QNAME = QName.create(NETCONF_QNAME, "validate");
<add> public static final QName NETCONF_COPY_CONFIG_QNAME = QName.create(NETCONF_QNAME, "copy-config");
<add> public static final QName NETCONF_OPERATION_QNAME = QName.create(NETCONF_QNAME, "operation");
<add> public static final QName NETCONF_DEFAULT_OPERATION_QNAME = QName.create(NETCONF_OPERATION_QNAME, "default-operation");
<add> public static final QName NETCONF_EDIT_CONFIG_QNAME = QName.create(NETCONF_QNAME, "edit-config");
<add> public static final QName NETCONF_GET_CONFIG_QNAME = QName.create(NETCONF_QNAME, "get-config");
<add> public static final QName NETCONF_DISCARD_CHANGES_QNAME = QName.create(NETCONF_QNAME, "discard-changes");
<add> public static final QName NETCONF_TYPE_QNAME = QName.create(NETCONF_QNAME, "type");
<add> public static final QName NETCONF_FILTER_QNAME = QName.create(NETCONF_QNAME, "filter");
<add> public static final QName NETCONF_GET_QNAME = QName.create(NETCONF_QNAME, "get");
<add> public static final QName NETCONF_RPC_QNAME = QName.create(NETCONF_QNAME, "rpc");
<add>
<add> public static final URI NETCONF_ROLLBACK_ON_ERROR_URI = URI
<ide> .create("urn:ietf:params:netconf:capability:rollback-on-error:1.0");
<del> public static String ROLLBACK_ON_ERROR_OPTION = "rollback-on-error";
<del>
<del> public static URI NETCONF_CANDIDATE_URI = URI
<add> public static final String ROLLBACK_ON_ERROR_OPTION = "rollback-on-error";
<add>
<add> public static final URI NETCONF_CANDIDATE_URI = URI
<ide> .create("urn:ietf:params:netconf:capability:candidate:1.0");
<ide>
<del> public static URI NETCONF_NOTIFICATONS_URI = URI
<add> public static final URI NETCONF_NOTIFICATONS_URI = URI
<ide> .create("urn:ietf:params:netconf:capability:notification:1.0");
<ide>
<del> public static URI NETCONF_RUNNING_WRITABLE_URI = URI
<add> public static final URI NETCONF_RUNNING_WRITABLE_URI = URI
<ide> .create("urn:ietf:params:netconf:capability:writable-running:1.0");
<ide>
<del> public static QName NETCONF_LOCK_QNAME = QName.create(NETCONF_QNAME, "lock");
<del> public static QName NETCONF_UNLOCK_QNAME = QName.create(NETCONF_QNAME, "unlock");
<add> public static final QName NETCONF_LOCK_QNAME = QName.create(NETCONF_QNAME, "lock");
<add> public static final QName NETCONF_UNLOCK_QNAME = QName.create(NETCONF_QNAME, "unlock");
<ide>
<ide> // Discard changes message
<ide> public static final ContainerNode DISCARD_CHANGES_RPC_CONTENT =
<ide> }
<ide>
<ide> private static ErrorSeverity toRpcErrorSeverity( final NetconfDocumentedException.ErrorSeverity severity ) {
<del> switch( severity ) {
<add> switch (severity) {
<ide> case warning:
<ide> return RpcError.ErrorSeverity.WARNING;
<ide> default:
<ide> }
<ide>
<ide> private static RpcError.ErrorType toRpcErrorType(final NetconfDocumentedException.ErrorType type) {
<del> switch( type ) {
<add> switch (type) {
<ide> case protocol:
<ide> return RpcError.ErrorType.PROTOCOL;
<ide> case rpc:
<ide>
<ide> public static boolean isDataRetrievalOperation(final QName rpc) {
<ide> return NETCONF_URI.equals(rpc.getNamespace())
<del> && (rpc.getLocalName().equals(NETCONF_GET_CONFIG_QNAME.getLocalName()) || rpc.getLocalName().equals(
<del> NETCONF_GET_QNAME.getLocalName()));
<add> && (NETCONF_GET_CONFIG_QNAME.getLocalName().equals(rpc.getLocalName())
<add> || NETCONF_GET_QNAME.getLocalName().equals(rpc.getLocalName()));
<ide> }
<ide>
<ide> public static ContainerSchemaNode createSchemaForDataRead(final SchemaContext schemaContext) {
<del> final QName config = QName.create(NETCONF_EDIT_CONFIG_QNAME, "data");
<del> return new NodeContainerProxy(config, schemaContext.getChildNodes());
<add> return new NodeContainerProxy(NETCONF_DATA_QNAME, schemaContext.getChildNodes());
<ide> }
<ide>
<ide> public static ContainerSchemaNode createSchemaForNotification(final NotificationDefinition next) {
<ide> }
<ide>
<ide> public static ContainerNode wrap(final QName name, final DataContainerChild<?, ?>... node) {
<del> return Builders.containerBuilder().withNodeIdentifier(toId(name)).withValue(Lists.newArrayList(node)).build();
<add> return Builders.containerBuilder().withNodeIdentifier(toId(name)).withValue(ImmutableList.copyOf(node)).build();
<ide> }
<ide>
<ide> public static DataContainerChild<?, ?> createEditConfigStructure(final SchemaContext ctx, final YangInstanceIdentifier dataPath, |
|
Java | apache-2.0 | 17c87fed1e022763a2896b675caefe42d5a9784a | 0 | ceylon/ceylon-js,ceylon/ceylon-js,ceylon/ceylon-js | package com.redhat.ceylon.compiler.js;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Stack;
import org.antlr.runtime.CommonToken;
import com.redhat.ceylon.compiler.typechecker.analyzer.AnalysisWarning;
import com.redhat.ceylon.compiler.typechecker.model.Class;
import com.redhat.ceylon.compiler.typechecker.model.ClassOrInterface;
import com.redhat.ceylon.compiler.typechecker.model.Declaration;
import com.redhat.ceylon.compiler.typechecker.model.Functional;
import com.redhat.ceylon.compiler.typechecker.model.Getter;
import com.redhat.ceylon.compiler.typechecker.model.ImportableScope;
import com.redhat.ceylon.compiler.typechecker.model.Interface;
import com.redhat.ceylon.compiler.typechecker.model.InterfaceAlias;
import com.redhat.ceylon.compiler.typechecker.model.Method;
import com.redhat.ceylon.compiler.typechecker.model.MethodOrValue;
import com.redhat.ceylon.compiler.typechecker.model.Module;
import com.redhat.ceylon.compiler.typechecker.model.Package;
import com.redhat.ceylon.compiler.typechecker.model.ProducedType;
import com.redhat.ceylon.compiler.typechecker.model.Scope;
import com.redhat.ceylon.compiler.typechecker.model.Setter;
import com.redhat.ceylon.compiler.typechecker.model.Specification;
import com.redhat.ceylon.compiler.typechecker.model.TypeDeclaration;
import com.redhat.ceylon.compiler.typechecker.model.TypeParameter;
import com.redhat.ceylon.compiler.typechecker.model.Util;
import com.redhat.ceylon.compiler.typechecker.model.Value;
import com.redhat.ceylon.compiler.typechecker.tree.*;
import com.redhat.ceylon.compiler.typechecker.tree.Tree.*;
public class GenerateJsVisitor extends Visitor
implements NaturalVisitor {
private boolean indent=true;
private boolean comment=true;
private boolean verbose=false;
private final Stack<Continuation> continues = new Stack<Continuation>();
private final EnclosingFunctionVisitor encloser = new EnclosingFunctionVisitor();
private final JsIdentifierNames names;
private final Set<Declaration> directAccess = new HashSet<Declaration>();
private final RetainedVars retainedVars = new RetainedVars();
private final Map<String, String> importedModules;
final ConditionGenerator conds;
private final InvocationGenerator invoker;
private final List<CommonToken> tokens;
private int dynblock;
private final class SuperVisitor extends Visitor {
private final List<Declaration> decs;
private SuperVisitor(List<Declaration> decs) {
this.decs = decs;
}
@Override
public void visit(QualifiedMemberOrTypeExpression qe) {
if (qe.getPrimary() instanceof Super) {
decs.add(qe.getDeclaration());
}
super.visit(qe);
}
@Override
public void visit(BaseMemberOrTypeExpression that) {
if (that.getSupertypeQualifier() != null) {
decs.add(that.getDeclaration());
}
super.visit(that);
}
@Override
public void visit(QualifiedType that) {
if (that.getOuterType() instanceof SuperType) {
decs.add(that.getDeclarationModel());
}
super.visit(that);
}
public void visit(Tree.ClassOrInterface qe) {
//don't recurse
if (qe instanceof ClassDefinition) {
ExtendedType extType = ((ClassDefinition) qe).getExtendedType();
if (extType != null) { super.visit(extType); }
}
}
}
private final class OuterVisitor extends Visitor {
boolean found = false;
private Declaration dec;
private OuterVisitor(Declaration dec) {
this.dec = dec;
}
@Override
public void visit(QualifiedMemberOrTypeExpression qe) {
if (qe.getPrimary() instanceof Outer ||
qe.getPrimary() instanceof This) {
if ( qe.getDeclaration().equals(dec) ) {
found = true;
}
}
super.visit(qe);
}
}
private List<? extends Statement> currentStatements = null;
private final TypeUtils types;
private Writer out;
final boolean prototypeStyle;
private CompilationUnit root;
private static String clAlias="";
private static final String function="function ";
private boolean needIndent = true;
private int indentLevel = 0;
private static void setCLAlias(String alias) {
clAlias = alias + ".";
}
    /** Returns the alias (including the trailing dot) under which the language module is accessed. */
static String getClAlias() { return clAlias; }
@Override
public void handleException(Exception e, Node that) {
that.addUnexpectedError(that.getMessage(e, this));
}
public GenerateJsVisitor(Writer out, boolean prototypeStyle, JsIdentifierNames names,
List<CommonToken> tokens, Map<String,String> imports, TypeUtils typeUtils) {
this.out = out;
this.prototypeStyle=prototypeStyle;
this.names = names;
conds = new ConditionGenerator(this, names, directAccess);
this.tokens = tokens;
importedModules = imports;
types = typeUtils;
invoker = new InvocationGenerator(this, names, retainedVars);
}
TypeUtils getTypeUtils() { return types; }
/** Tells the receiver whether to add comments to certain declarations. Default is true. */
public void setAddComments(boolean flag) { comment = flag; }
public boolean isAddComments() { return comment; }
/** Tells the receiver whether to indent the generated code. Default is true. */
public void setIndent(boolean flag) { indent = flag; }
/** Tells the receiver to be verbose (prints generated code to STDOUT in addition to writer) */
public void setVerbose(boolean flag) { verbose = flag; }
/** Returns the helper component to handle naming. */
JsIdentifierNames getNames() { return names; }
private static interface GenerateCallback {
public void generateValue();
}
/** Print generated code to the Writer specified at creation time.
* Automatically prints indentation first if necessary.
* @param code The main code
* @param codez Optional additional strings to print after the main code. */
void out(String code, String... codez) {
try {
if (indent && needIndent) {
for (int i=0;i<indentLevel;i++) {
out.write(" ");
}
}
needIndent = false;
out.write(code);
for (String s : codez) {
out.write(s);
}
if (verbose) {
System.out.print(code);
for (String s : codez) {
System.out.print(s);
}
}
}
catch (IOException ioe) {
throw new RuntimeException("Generating JS code", ioe);
}
}
/** Prints a newline. Indentation will automatically be printed by {@link #out(String, String...)}
* when the next line is started. */
void endLine() {
endLine(false);
}
/** Prints a newline. Indentation will automatically be printed by {@link #out(String, String...)}
* when the next line is started.
* @param semicolon if <code>true</code> then a semicolon is printed at the end
* of the previous line*/
void endLine(boolean semicolon) {
if (semicolon) { out(";"); }
out("\n");
needIndent = true;
}
/** Calls {@link #endLine()} if the current position is not already the beginning
* of a line. */
void beginNewLine() {
if (!needIndent) { endLine(); }
}
/** Increases indentation level, prints opening brace and newline. Indentation will
* automatically be printed by {@link #out(String, String...)} when the next line is started. */
void beginBlock() {
indentLevel++;
out("{");
endLine();
}
/** Decreases indentation level, prints a closing brace in new line (using
* {@link #beginNewLine()}) and calls {@link #endLine()}. */
void endBlockNewLine() {
endBlock(false, true);
}
/** Decreases indentation level, prints a closing brace in new line (using
* {@link #beginNewLine()}) and calls {@link #endLine()}.
* @param semicolon if <code>true</code> then prints a semicolon after the brace*/
void endBlockNewLine(boolean semicolon) {
endBlock(semicolon, true);
}
/** Decreases indentation level and prints a closing brace in new line (using
* {@link #beginNewLine()}). */
void endBlock() {
endBlock(false, false);
}
/** Decreases indentation level and prints a closing brace in new line (using
* {@link #beginNewLine()}).
* @param semicolon if <code>true</code> then prints a semicolon after the brace
* @param newline if <code>true</code> then additionally calls {@link #endLine()} */
void endBlock(boolean semicolon, boolean newline) {
indentLevel--;
beginNewLine();
out(semicolon ? "};" : "}");
if (newline) { endLine(); }
}
/** Prints source code location in the form "at [filename] ([location])" */
void location(Node node) {
out(" at ", node.getUnit().getFilename(), " (", node.getLocation(), ")");
}
private String generateToString(final GenerateCallback callback) {
final Writer oldWriter = out;
out = new StringWriter();
callback.generateValue();
final String str = out.toString();
out = oldWriter;
return str;
}
@Override
public void visit(CompilationUnit that) {
root = that;
Module clm = that.getUnit().getPackage().getModule()
.getLanguageModule();
if (!JsCompiler.compilingLanguageModule) {
require(clm);
setCLAlias(names.moduleAlias(clm));
}
for (CompilerAnnotation ca: that.getCompilerAnnotations()) {
ca.visit(this);
}
if (that.getImportList() != null) {
that.getImportList().visit(this);
}
visitStatements(that.getDeclarations());
}
public void visit(Import that) {
ImportableScope scope =
that.getImportMemberOrTypeList().getImportList().getImportedScope();
if (scope instanceof Package) {
require(((Package) scope).getModule());
}
}
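    /* Note (inferred from the code below; not authoritative): emits a CommonJS-style
     * require for the given module and caches its alias so each module is required
     * only once. For a hypothetical module "foo.bar" version "1.0", scriptPath()
     * yields "foo/bar/1.0/foo.bar-1.0", so the emitted line would look roughly like
     *   var <alias>=require('foo/bar/1.0/foo.bar-1.0');
     * where <alias> comes from JsIdentifierNames.moduleAlias. */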
private void require(Module mod) {
final String path = scriptPath(mod);
final String modAlias = names.moduleAlias(mod);
if (importedModules.put(path, modAlias) == null) {
out("var ", modAlias, "=require('", path, "');");
endLine();
}
}
private String scriptPath(Module mod) {
StringBuilder path = new StringBuilder(mod.getNameAsString().replace('.', '/')).append('/');
if (!mod.isDefault()) {
path.append(mod.getVersion()).append('/');
}
path.append(mod.getNameAsString());
if (!mod.isDefault()) {
path.append('-').append(mod.getVersion());
}
return path.toString();
}
@Override
public void visit(Parameter that) {
out(names.name(that.getDeclarationModel()));
}
@Override
public void visit(ParameterList that) {
out("(");
boolean first=true;
boolean ptypes = false;
//Check if this is the first parameter list
if (that.getScope() instanceof Method && that.getModel().isFirst()) {
ptypes = ((Method)that.getScope()).getTypeParameters() != null &&
!((Method)that.getScope()).getTypeParameters().isEmpty();
}
for (Parameter param: that.getParameters()) {
if (!first) out(",");
out(names.name(param.getDeclarationModel()));
first = false;
}
if (ptypes) {
if (!first) out(",");
out("$$$mptypes");
}
out(")");
}
private void visitStatements(List<? extends Statement> statements) {
List<String> oldRetainedVars = retainedVars.reset(null);
final List<? extends Statement> prevStatements = currentStatements;
currentStatements = statements;
for (int i=0; i<statements.size(); i++) {
Statement s = statements.get(i);
s.visit(this);
beginNewLine();
retainedVars.emitRetainedVars(this);
}
retainedVars.reset(oldRetainedVars);
currentStatements = prevStatements;
}
@Override
public void visit(Body that) {
visitStatements(that.getStatements());
}
@Override
public void visit(Block that) {
List<Statement> stmnts = that.getStatements();
if (stmnts.isEmpty()) {
out("{}");
}
else {
beginBlock();
initSelf(that);
visitStatements(stmnts);
endBlock();
}
}
private void initSelf(Block block) {
initSelf(block.getScope());
}
private void initSelf(Scope scope) {
if ((prototypeOwner != null) &&
((scope instanceof MethodOrValue)
|| (scope instanceof TypeDeclaration)
|| (scope instanceof Specification))) {
out("var ");
self(prototypeOwner);
out("=this;");
endLine();
}
}
private void comment(Tree.Declaration that) {
if (!comment) return;
endLine();
out("//", that.getNodeType(), " ", that.getDeclarationModel().getName());
location(that);
endLine();
}
private void var(Declaration d) {
out("var ", names.name(d), "=");
}
private boolean share(Declaration d) {
return share(d, true);
}
private boolean share(Declaration d, boolean excludeProtoMembers) {
boolean shared = false;
if (!(excludeProtoMembers && prototypeStyle && d.isClassOrInterfaceMember())
&& isCaptured(d)) {
beginNewLine();
outerSelf(d);
out(".", names.name(d), "=", names.name(d), ";");
endLine();
shared = true;
}
return shared;
}
@Override
public void visit(ClassDeclaration that) {
//Don't even bother with nodes that have errors
if (that.getErrors() != null && !that.getErrors().isEmpty()) {
//But warnings are ok
for (Message err : that.getErrors()) {
if (!(err instanceof AnalysisWarning)) {
return;
}
}
}
Class d = that.getDeclarationModel();
if (prototypeStyle && d.isClassOrInterfaceMember()) return;
comment(that);
Tree.ClassSpecifier ext = that.getClassSpecifier();
out(function, names.name(d), "(");
//Generate each parameter because we need to append one at the end
for (Parameter p: that.getParameterList().getParameters()) {
p.visit(this);
out(", ");
}
TypeArgumentList targs = ext.getType().getTypeArgumentList();
if (targs != null && !targs.getTypes().isEmpty()) {
out("$$targs$$,");
}
self(d);
out(")");
TypeDeclaration aliased = ext.getType().getDeclarationModel();
out("{return ");
qualify(ext.getType(), aliased);
out(names.name(aliased), "(");
if (ext.getInvocationExpression().getPositionalArgumentList() != null) {
ext.getInvocationExpression().getPositionalArgumentList().visit(this);
if (!ext.getInvocationExpression().getPositionalArgumentList().getPositionalArguments().isEmpty()) {
out(",");
}
} else {
out("/*PENDIENTE NAMED ARG CLASS DECL */");
}
if (targs != null && !targs.getTypes().isEmpty()) {
Map<TypeParameter, ProducedType> invargs = TypeUtils.matchTypeParametersWithArguments(
aliased.getTypeParameters(), targs.getTypeModels());
if (invargs != null) {
TypeUtils.printTypeArguments(that, invargs, this);
} else {
out("/*TARGS != TPARAMS!!!! WTF?????*/");
}
out(",");
}
self(d);
out(");}");
endLine();
out(names.name(d), ".$$=");
qualify(ext, aliased);
out(names.name(aliased), ".$$;");
endLine();
share(d);
}
private void addClassDeclarationToPrototype(TypeDeclaration outer, ClassDeclaration that) {
comment(that);
TypeDeclaration dec = that.getClassSpecifier().getType().getTypeModel().getDeclaration();
String path = qualifiedPath(that, dec, true);
if (path.length() > 0) {
path += '.';
}
out(names.self(outer), ".", names.name(that.getDeclarationModel()), "=",
path, names.name(dec), ";");
endLine();
}
@Override
public void visit(InterfaceDeclaration that) {
//Don't even bother with nodes that have errors
if (that.getErrors() != null && !that.getErrors().isEmpty()) return;
Interface d = that.getDeclarationModel();
if (prototypeStyle && d.isClassOrInterfaceMember()) return;
//It's pointless declaring interface aliases outside of classes/interfaces
Scope scope = that.getScope();
if (scope instanceof InterfaceAlias) {
scope = scope.getContainer();
if (!(scope instanceof ClassOrInterface)) return;
}
comment(that);
var(d);
TypeDeclaration dec = that.getTypeSpecifier().getType().getTypeModel()
.getDeclaration();
qualify(that,dec);
out(names.name(dec), ";");
endLine();
share(d);
}
private void addInterfaceDeclarationToPrototype(TypeDeclaration outer, InterfaceDeclaration that) {
comment(that);
TypeDeclaration dec = that.getTypeSpecifier().getType().getTypeModel().getDeclaration();
String path = qualifiedPath(that, dec, true);
if (path.length() > 0) {
path += '.';
}
out(names.self(outer), ".", names.name(that.getDeclarationModel()), "=",
path, names.name(dec), ";");
endLine();
}
private void addInterfaceToPrototype(ClassOrInterface type, InterfaceDefinition interfaceDef) {
interfaceDefinition(interfaceDef);
Interface d = interfaceDef.getDeclarationModel();
out(names.self(type), ".", names.name(d), "=", names.name(d), ";");
endLine();
}
@Override
public void visit(InterfaceDefinition that) {
//Don't even bother with nodes that have errors
if (that.getErrors() != null && !that.getErrors().isEmpty()) return;
if (!(prototypeStyle && that.getDeclarationModel().isClassOrInterfaceMember())) {
interfaceDefinition(that);
}
}
private void interfaceDefinition(InterfaceDefinition that) {
Interface d = that.getDeclarationModel();
comment(that);
out(function, names.name(d), "(");
self(d);
out(")");
beginBlock();
//declareSelf(d);
referenceOuter(d);
final List<Declaration> superDecs = new ArrayList<Declaration>();
if (!prototypeStyle) {
new SuperVisitor(superDecs).visit(that.getInterfaceBody());
}
callInterfaces(that.getSatisfiedTypes(), d, that, superDecs);
that.getInterfaceBody().visit(this);
//returnSelf(d);
endBlockNewLine();
share(d);
typeInitialization(that);
}
private void addClassToPrototype(ClassOrInterface type, ClassDefinition classDef) {
classDefinition(classDef);
Class d = classDef.getDeclarationModel();
out(names.self(type), ".", names.name(d), "=", names.name(d), ";");
endLine();
}
@Override
public void visit(ClassDefinition that) {
//Don't even bother with nodes that have errors
if (that.getErrors() != null && !that.getErrors().isEmpty()) return;
if (!(prototypeStyle && that.getDeclarationModel().isClassOrInterfaceMember())) {
classDefinition(that);
}
}
private void classDefinition(ClassDefinition that) {
Class d = that.getDeclarationModel();
comment(that);
out(function, names.name(d), "(");
for (Parameter p: that.getParameterList().getParameters()) {
p.visit(this);
out(", ");
}
boolean withTargs = that.getTypeParameterList() != null &&
!that.getTypeParameterList().getTypeParameterDeclarations().isEmpty();
if (withTargs) {
out("$$targs$$,");
}
self(d);
out(")");
beginBlock();
//This takes care of top-level attributes defined before the class definition
out("$init$", names.name(d), "();");
endLine();
declareSelf(d);
if (withTargs) {
out(clAlias, "set_type_args(");
self(d); out(",$$targs$$);"); endLine();
} else {
//Check if any of the satisfied types have type arguments
if (that.getSatisfiedTypes() != null) {
for(Tree.StaticType sat : that.getSatisfiedTypes().getTypes()) {
boolean first = true;
Map<TypeParameter,ProducedType> targs = sat.getTypeModel().getTypeArguments();
if (targs != null && !targs.isEmpty()) {
if (first) {
self(d); out(".$$targs$$=");
TypeUtils.printTypeArguments(that, targs, this);
endLine(true);
} else {
out("/*TODO: more type arguments*/");
endLine();
}
}
}
}
}
referenceOuter(d);
initParameters(that.getParameterList(), d);
final List<Declaration> superDecs = new ArrayList<Declaration>();
if (!prototypeStyle) {
new SuperVisitor(superDecs).visit(that.getClassBody());
}
callSuperclass(that.getExtendedType(), d, that, superDecs);
callInterfaces(that.getSatisfiedTypes(), d, that, superDecs);
that.getClassBody().visit(this);
returnSelf(d);
endBlockNewLine();
share(d);
typeInitialization(that);
}
private void referenceOuter(TypeDeclaration d) {
if (prototypeStyle && d.isClassOrInterfaceMember()) {
self(d);
out(".");
outerSelf(d);
out("=this;");
endLine();
}
}
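    /* Descriptive note (inferred from the code below): in non-prototype style this
     * copies references to the inherited members listed in decs onto the subtype's
     * self object, suffixed with the parent scope, so that "super.member" and
     * "Supertype::member" accesses can still reach the original definitions after
     * the subtype overrides them. */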
private void copySuperMembers(TypeDeclaration typeDecl, final List<Declaration> decs, ClassOrInterface d) {
if (!prototypeStyle) {
for (Declaration dec: decs) {
if (!typeDecl.isMember(dec)) { continue; }
String suffix = names.scopeSuffix(dec.getContainer());
if (dec instanceof Value) {
superGetterRef(dec,d,suffix);
if (((Value) dec).isVariable()) {
superSetterRef(dec,d,suffix);
}
}
else if (dec instanceof Getter) {
superGetterRef(dec,d,suffix);
if (((Getter) dec).isVariable()) {
superSetterRef(dec,d,suffix);
}
}
else {
superRef(dec,d,suffix);
}
}
}
}
private void callSuperclass(ExtendedType extendedType, Class d, Node that,
final List<Declaration> superDecs) {
if (extendedType!=null) {
PositionalArgumentList argList = extendedType.getInvocationExpression()
.getPositionalArgumentList();
TypeDeclaration typeDecl = extendedType.getType().getDeclarationModel();
out(memberAccessBase(extendedType.getType(), typeDecl, false, qualifiedPath(that, typeDecl)),
(prototypeStyle && (getSuperMemberScope(extendedType.getType()) != null))
? ".call(this," : "(");
invoker.generatePositionalArguments(argList, argList.getPositionalArguments(), false);
if (argList.getPositionalArguments().size() > 0) {
out(",");
}
//There may be defaulted args we must pass as undefined
if (d.getExtendedTypeDeclaration().getParameterList().getParameters().size() > argList.getPositionalArguments().size()) {
List<com.redhat.ceylon.compiler.typechecker.model.Parameter> superParams = d.getExtendedTypeDeclaration().getParameterList().getParameters();
for (int i = argList.getPositionalArguments().size(); i < superParams.size(); i++) {
com.redhat.ceylon.compiler.typechecker.model.Parameter p = superParams.get(i);
if (p.isSequenced()) {
out(clAlias, "getEmpty(),");
} else {
out("undefined,");
}
}
}
//If the supertype has type arguments, add them to the call
if (typeDecl.getTypeParameters() != null && !typeDecl.getTypeParameters().isEmpty()) {
extendedType.getType().getTypeArgumentList().getTypeModels();
TypeUtils.printTypeArguments(that, TypeUtils.matchTypeParametersWithArguments(typeDecl.getTypeParameters(),
extendedType.getType().getTypeArgumentList().getTypeModels()), this);
out(",");
}
self(d);
out(");");
endLine();
copySuperMembers(typeDecl, superDecs, d);
}
}
private void callInterfaces(SatisfiedTypes satisfiedTypes, ClassOrInterface d, Node that,
final List<Declaration> superDecs) {
if (satisfiedTypes!=null) {
for (StaticType st: satisfiedTypes.getTypes()) {
TypeDeclaration typeDecl = st.getTypeModel().getDeclaration();
if (typeDecl.isAlias()) {
typeDecl = typeDecl.getExtendedTypeDeclaration();
}
qualify(that, typeDecl);
out(names.name((ClassOrInterface)typeDecl), "(");
self(d);
out(");");
endLine();
//Set the reified types from interfaces
Map<TypeParameter, ProducedType> reifs = st.getTypeModel().getTypeArguments();
if (reifs != null && !reifs.isEmpty()) {
for (Map.Entry<TypeParameter, ProducedType> e : reifs.entrySet()) {
if (e.getValue().getDeclaration() instanceof ClassOrInterface) {
out(clAlias, "add_type_arg(");
self(d);
out(",'", e.getKey().getName(), "',");
TypeUtils.typeNameOrList(that, e.getValue(), this, true);
out(");");
endLine();
}
}
}
copySuperMembers(typeDecl, superDecs, d);
}
}
}
/** Generates a function to initialize the specified type. */
private void typeInitialization(final Tree.Declaration type) {
ExtendedType extendedType = null;
SatisfiedTypes satisfiedTypes = null;
boolean isInterface = false;
ClassOrInterface decl = null;
if (type instanceof ClassDefinition) {
ClassDefinition classDef = (ClassDefinition) type;
extendedType = classDef.getExtendedType();
satisfiedTypes = classDef.getSatisfiedTypes();
decl = classDef.getDeclarationModel();
} else if (type instanceof InterfaceDefinition) {
satisfiedTypes = ((InterfaceDefinition) type).getSatisfiedTypes();
isInterface = true;
decl = ((InterfaceDefinition) type).getDeclarationModel();
} else if (type instanceof ObjectDefinition) {
ObjectDefinition objectDef = (ObjectDefinition) type;
extendedType = objectDef.getExtendedType();
satisfiedTypes = objectDef.getSatisfiedTypes();
decl = (ClassOrInterface)objectDef.getDeclarationModel().getTypeDeclaration();
}
final PrototypeInitCallback callback = new PrototypeInitCallback() {
@Override
public void addToPrototypeCallback() {
if (type instanceof ClassDefinition) {
addToPrototype(((ClassDefinition)type).getDeclarationModel(), ((ClassDefinition)type).getClassBody().getStatements());
} else if (type instanceof InterfaceDefinition) {
addToPrototype(((InterfaceDefinition)type).getDeclarationModel(), ((InterfaceDefinition)type).getInterfaceBody().getStatements());
}
}
};
typeInitialization(extendedType, satisfiedTypes, isInterface, decl, callback);
}
/** This is now the main method to generate the type initialization code.
* @param extendedType The type that is being extended.
* @param satisfiedTypes The types satisfied by the type being initialized.
* @param isInterface Tells whether the type being initialized is an interface
* @param d The declaration for the type being initialized
* @param callback A callback to add something more to the type initializer in prototype style.
*/
private void typeInitialization(ExtendedType extendedType, SatisfiedTypes satisfiedTypes, boolean isInterface,
ClassOrInterface d, PrototypeInitCallback callback) {
//Let's always use initTypeProto to avoid #113
String initFuncName = "initTypeProto";
out("function $init$", names.name(d), "()");
beginBlock();
out("if (", names.name(d), ".$$===undefined)");
beginBlock();
String qns = d.getQualifiedNameString();
if (JsCompiler.compilingLanguageModule && qns.indexOf("::") < 0) {
//Language module files get compiled in default module
//so they need to have this added to their qualified name
qns = "ceylon.language::" + qns;
}
out(clAlias, initFuncName, "(", names.name(d), ",'", qns, "'");
if (extendedType != null) {
String fname = typeFunctionName(extendedType.getType(), false);
out(",", fname);
} else if (!isInterface) {
out(",", clAlias, "Basic");
}
if (satisfiedTypes != null) {
for (StaticType satType : satisfiedTypes.getTypes()) {
TypeDeclaration tdec = satType.getTypeModel().getDeclaration();
if (tdec.isAlias()) {
tdec = tdec.getExtendedTypeDeclaration();
}
String fname = typeFunctionName(satType, true);
//Actually it could be "if not in same module"
if (!JsCompiler.compilingLanguageModule && declaredInCL(tdec)) {
out(",", fname);
} else {
int idx = fname.lastIndexOf('.');
if (idx > 0) {
fname = fname.substring(0, idx+1) + "$init$" + fname.substring(idx+1);
} else {
fname = "$init$" + fname;
}
out(",", fname, "()");
}
}
}
out(");");
//The class definition needs to be inside the init function if we want forwards decls to work in prototype style
if (prototypeStyle) {
endLine();
callback.addToPrototypeCallback();
}
endBlockNewLine();
out("return ", names.name(d), ";");
endBlockNewLine();
//If it's nested, share the init function
if (outerSelf(d)) {
out(".$init$", names.name(d), "=$init$", names.name(d), ";");
endLine();
}
out("$init$", names.name(d), "();");
endLine();
}
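    /* Illustrative sketch of what the method above emits for a type Foo (names are
     * mangled in the real output, so this is only an approximation):
     *   function $init$Foo(){
     *     if(Foo.$$===undefined){
     *       <clAlias>initTypeProto(Foo,'pkg::Foo',<superclass or Basic>,<satisfied types>);
     *     }
     *     return Foo;
     *   }
     *   $init$Foo();
     */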
private String typeFunctionName(StaticType type, boolean removeAlias) {
TypeDeclaration d = type.getTypeModel().getDeclaration();
if (removeAlias && d.isAlias()) {
d = d.getExtendedTypeDeclaration();
}
boolean inProto = prototypeStyle
&& (type.getScope().getContainer() instanceof TypeDeclaration);
return memberAccessBase(type, d, false, qualifiedPath(type, d, inProto));
}
private void addToPrototype(ClassOrInterface d, List<Statement> statements) {
if (prototypeStyle && !statements.isEmpty()) {
final List<? extends Statement> prevStatements = currentStatements;
currentStatements = statements;
out("(function(", names.self(d), ")");
beginBlock();
for (Statement s: statements) {
addToPrototype(d, s);
}
endBlock();
out(")(", names.name(d), ".$$.prototype);");
endLine();
currentStatements = prevStatements;
}
}
private ClassOrInterface prototypeOwner;
private void addToPrototype(ClassOrInterface d, Statement s) {
ClassOrInterface oldPrototypeOwner = prototypeOwner;
prototypeOwner = d;
if (s instanceof MethodDefinition) {
addMethodToPrototype(d, (MethodDefinition)s);
} else if (s instanceof MethodDeclaration) {
methodDeclaration(d, (MethodDeclaration) s);
} else if (s instanceof AttributeGetterDefinition) {
addGetterToPrototype(d, (AttributeGetterDefinition)s);
} else if (s instanceof AttributeDeclaration) {
addGetterAndSetterToPrototype(d, (AttributeDeclaration) s);
} else if (s instanceof ClassDefinition) {
addClassToPrototype(d, (ClassDefinition) s);
} else if (s instanceof InterfaceDefinition) {
addInterfaceToPrototype(d, (InterfaceDefinition) s);
} else if (s instanceof ObjectDefinition) {
addObjectToPrototype(d, (ObjectDefinition) s);
} else if (s instanceof ClassDeclaration) {
addClassDeclarationToPrototype(d, (ClassDeclaration) s);
} else if (s instanceof InterfaceDeclaration) {
addInterfaceDeclarationToPrototype(d, (InterfaceDeclaration) s);
} else if (s instanceof SpecifierStatement) {
addSpecifierToPrototype(d, (SpecifierStatement) s);
}
prototypeOwner = oldPrototypeOwner;
}
private void declareSelf(ClassOrInterface d) {
out("if (");
self(d);
out("===undefined)");
self(d);
out("=new ");
if (prototypeStyle && d.isClassOrInterfaceMember()) {
out("this.", names.name(d), ".$$;");
} else {
out(names.name(d), ".$$;");
}
endLine();
/*out("var ");
self(d);
out("=");
self();
out(";");
endLine();*/
}
private void instantiateSelf(ClassOrInterface d) {
out("var ");
self(d);
out("=new ");
if (prototypeStyle && d.isClassOrInterfaceMember()) {
out("this.", names.name(d), ".$$;");
} else {
out(names.name(d), ".$$;");
}
endLine();
}
private void returnSelf(ClassOrInterface d) {
out("return ");
self(d);
out(";");
}
private void addObjectToPrototype(ClassOrInterface type, ObjectDefinition objDef) {
objectDefinition(objDef);
Value d = objDef.getDeclarationModel();
Class c = (Class) d.getTypeDeclaration();
out(names.self(type), ".", names.name(c), "=", names.name(c), ";");
endLine();
}
@Override
public void visit(ObjectDefinition that) {
//Don't even bother with nodes that have errors
if (that.getErrors() != null && !that.getErrors().isEmpty()) return;
Value d = that.getDeclarationModel();
if (!(prototypeStyle && d.isClassOrInterfaceMember())) {
objectDefinition(that);
} else {
Class c = (Class) d.getTypeDeclaration();
comment(that);
outerSelf(d);
out(".", names.name(d), "=");
outerSelf(d);
out(".", names.name(c), "();");
endLine();
}
}
private void objectDefinition(ObjectDefinition that) {
comment(that);
Value d = that.getDeclarationModel();
boolean addToPrototype = prototypeStyle && d.isClassOrInterfaceMember();
Class c = (Class) d.getTypeDeclaration();
out(function, names.name(c));
Map<TypeParameter, ProducedType> targs=new HashMap<TypeParameter, ProducedType>();
if (that.getSatisfiedTypes() != null) {
for (StaticType st : that.getSatisfiedTypes().getTypes()) {
Map<TypeParameter, ProducedType> stargs = st.getTypeModel().getTypeArguments();
if (stargs != null && !stargs.isEmpty()) {
targs.putAll(stargs);
}
}
}
out(targs.isEmpty()?"()":"($$targs$$)");
beginBlock();
instantiateSelf(c);
referenceOuter(c);
final List<Declaration> superDecs = new ArrayList<Declaration>();
if (!prototypeStyle) {
new SuperVisitor(superDecs).visit(that.getClassBody());
}
if (!targs.isEmpty()) {
self(c); out(".$$targs$$=$$targs$$;"); endLine();
}
callSuperclass(that.getExtendedType(), c, that, superDecs);
callInterfaces(that.getSatisfiedTypes(), c, that, superDecs);
that.getClassBody().visit(this);
returnSelf(c);
indentLevel--;
endLine();
out("}");
endLine();
typeInitialization(that);
addToPrototype(c, that.getClassBody().getStatements());
if (!addToPrototype) {
out("var ", names.name(d), "=", names.name(c), "(");
if (!targs.isEmpty()) {
TypeUtils.printTypeArguments(that, targs, this);
}
out(");");
endLine();
}
if (!defineAsProperty(d)) {
out("var ", names.getter(d), "=function()");
beginBlock();
out("return ");
if (addToPrototype) {
out("this.");
}
out(names.name(d), ";");
endBlockNewLine();
if (addToPrototype || d.isShared()) {
outerSelf(d);
out(".", names.getter(d), "=", names.getter(d), ";");
endLine();
}
}
}
private void superRef(Declaration d, ClassOrInterface sub, String parentSuffix) {
//if (d.isActual()) {
self(sub);
out(".", names.name(d), parentSuffix, "=");
self(sub);
out(".", names.name(d), ";");
endLine();
//}
}
private void superGetterRef(Declaration d, ClassOrInterface sub, String parentSuffix) {
if (defineAsProperty(d)) {
out(clAlias, "copySuperAttr(", names.self(sub), ",'", names.name(d), "','",
parentSuffix, "');");
}
else {
self(sub);
out(".", names.getter(d), parentSuffix, "=");
self(sub);
out(".", names.getter(d), ";");
}
endLine();
}
private void superSetterRef(Declaration d, ClassOrInterface sub, String parentSuffix) {
if (!defineAsProperty(d)) {
self(sub);
out(".", names.setter(d), parentSuffix, "=");
self(sub);
out(".", names.setter(d), ";");
endLine();
}
}
@Override
public void visit(MethodDeclaration that) {
//Don't even bother with nodes that have errors
if (that.getErrors() != null && !that.getErrors().isEmpty()) return;
methodDeclaration(null, that);
}
private void methodDeclaration(TypeDeclaration outer, MethodDeclaration that) {
Method m = that.getDeclarationModel();
if (that.getSpecifierExpression() != null) {
// method(params) => expr
if (outer == null) {
            // Not in a prototype definition. Nothing to do here if it's a
            // member in prototype style.
if (prototypeStyle && m.isMember()) { return; }
comment(that);
out("var ");
}
else {
// prototype definition
comment(that);
out(names.self(outer), ".");
}
out(names.name(m), "=");
singleExprFunction(that.getParameterLists(),
that.getSpecifierExpression().getExpression(), that.getScope());
endLine(true);
share(m);
}
else if (outer == null) { // don't do the following in a prototype definition
//Check for refinement of simple param declaration
if (m == that.getScope()) {
if (m.getContainer() instanceof Class && m.isClassOrInterfaceMember()) {
//Declare the method just by pointing to the param function
final String name = names.name(((Class)m.getContainer()).getParameter(m.getName()));
if (name != null) {
self((Class)m.getContainer());
out(".", names.name(m), "=", name, ";");
endLine();
}
} else if (m.getContainer() instanceof Method) {
//Declare the function just by forcing the name we used in the param list
final String name = names.name(((Method)m.getContainer()).getParameter(m.getName()));
                    if (name != null) {
names.forceName(m, name);
}
}
}
}
}
@Override
public void visit(MethodDefinition that) {
Method d = that.getDeclarationModel();
//Don't even bother with nodes that have errors
if (that.getErrors() != null && !that.getErrors().isEmpty()) return;
if (!((prototypeStyle && that.getDeclarationModel().isClassOrInterfaceMember()) || isNative(d))) {
comment(that);
methodDefinition(that);
}
}
private void methodDefinition(MethodDefinition that) {
Method d = that.getDeclarationModel();
if (that.getParameterLists().size() == 1) {
out(function, names.name(d));
ParameterList paramList = that.getParameterLists().get(0);
paramList.visit(this);
beginBlock();
initSelf(that.getBlock());
initParameters(paramList, null);
visitStatements(that.getBlock().getStatements());
endBlock();
} else {
int count=0;
for (ParameterList paramList : that.getParameterLists()) {
if (count==0) {
out(function, names.name(d));
} else {
out("return function");
}
paramList.visit(this);
beginBlock();
initSelf(that.getBlock());
initParameters(paramList, null);
count++;
}
visitStatements(that.getBlock().getStatements());
for (int i=0; i < count; i++) {
endBlock();
}
}
if (!share(d)) { out(";"); }
}
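    /* Note (inferred from the code below; not authoritative): emits per-parameter
     * setup code. Defaulted and sequenced parameters are filled in when they arrive
     * as undefined, and captured parameters are copied onto the instance. Rough
     * shape of the generated JS for a defaulted, captured parameter p:
     *   if(p===undefined){p=<default expression>;}
     *   <self>.p=p;
     */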
private void initParameters(ParameterList params, TypeDeclaration typeDecl) {
for (final Parameter param : params.getParameters()) {
com.redhat.ceylon.compiler.typechecker.model.Parameter pd = param.getDeclarationModel();
/*if (param instanceof ValueParameterDeclaration && ((ValueParameterDeclaration)param).getDeclarationModel().isHidden()) {
//TODO support new syntax for class and method parameters
//the declaration is actually different from the one we usually use
out("//HIDDEN! ", pd.getName(), "(", names.name(pd), ")"); endLine();
}*/
String paramName = names.name(pd);
if (param.getDefaultArgument() != null || pd.isSequenced()) {
out("if(", paramName, "===undefined){", paramName, "=");
if (param.getDefaultArgument() == null) {
out(clAlias, "getEmpty()");
} else {
final SpecifierExpression defaultExpr =
param.getDefaultArgument().getSpecifierExpression();
if ((param instanceof FunctionalParameterDeclaration)
&& (defaultExpr instanceof LazySpecifierExpression)) {
// function parameter defaulted using "=>"
singleExprFunction(
((FunctionalParameterDeclaration) param).getParameterLists(),
defaultExpr.getExpression(), null);
}
else {
defaultExpr.visit(this);
}
}
out(";}");
endLine();
}
if ((typeDecl != null) && pd.isCaptured()) {
self(typeDecl);
out(".", paramName, "=", paramName, ";");
endLine();
}
}
}
private void addMethodToPrototype(TypeDeclaration outer,
MethodDefinition that) {
Method d = that.getDeclarationModel();
if (!prototypeStyle||!d.isClassOrInterfaceMember()) return;
comment(that);
out(names.self(outer), ".", names.name(d), "=");
methodDefinition(that);
}
@Override
public void visit(AttributeGetterDefinition that) {
Getter d = that.getDeclarationModel();
if (prototypeStyle&&d.isClassOrInterfaceMember()) return;
comment(that);
if (defineAsProperty(d)) {
out(clAlias, "defineAttr(");
outerSelf(d);
out(",'", names.name(d), "',function()");
super.visit(that);
final AttributeSetterDefinition setterDef = associatedSetterDefinition(that);
if (setterDef != null) {
out(",function(", names.name(setterDef.getDeclarationModel().getParameter()), ")");
super.visit(setterDef);
}
out(");");
}
else {
out("var ", names.getter(d), "=function()");
super.visit(that);
if (!shareGetter(d)) { out(";"); }
}
}
private void addGetterToPrototype(TypeDeclaration outer,
AttributeGetterDefinition that) {
Getter d = that.getDeclarationModel();
if (!prototypeStyle||!d.isClassOrInterfaceMember()) return;
comment(that);
out(clAlias, "defineAttr(", names.self(outer), ",'", names.name(d),
"',function()");
super.visit(that);
final AttributeSetterDefinition setterDef = associatedSetterDefinition(that);
if (setterDef != null) {
out(",function(", names.name(setterDef.getDeclarationModel().getParameter()), ")");
super.visit(setterDef);
}
out(");");
}
private AttributeSetterDefinition associatedSetterDefinition(
AttributeGetterDefinition getterDef) {
final Setter setter = getterDef.getDeclarationModel().getSetter();
if ((setter != null) && (currentStatements != null)) {
for (Statement stmt : currentStatements) {
if (stmt instanceof AttributeSetterDefinition) {
final AttributeSetterDefinition setterDef =
(AttributeSetterDefinition) stmt;
if (setterDef.getDeclarationModel() == setter) {
return setterDef;
}
}
}
}
return null;
}
/** Exports a getter function; useful in non-prototype style. */
private boolean shareGetter(MethodOrValue d) {
boolean shared = false;
if (isCaptured(d)) {
beginNewLine();
outerSelf(d);
out(".", names.getter(d), "=", names.getter(d), ";");
endLine();
shared = true;
}
return shared;
}
@Override
public void visit(AttributeSetterDefinition that) {
Setter d = that.getDeclarationModel();
if ((prototypeStyle&&d.isClassOrInterfaceMember()) || defineAsProperty(d)) return;
comment(that);
out("var ", names.setter(d.getGetter()), "=function(", names.name(d.getParameter()), ")");
super.visit(that);
if (!shareSetter(d)) { out(";"); }
}
private boolean isCaptured(Declaration d) {
if (d.isToplevel()||d.isClassOrInterfaceMember()) { //TODO: what about things nested inside control structures
if (d.isShared() || d.isCaptured() ) {
return true;
}
else {
OuterVisitor ov = new OuterVisitor(d);
ov.visit(root);
return ov.found;
}
}
else {
return false;
}
}
private boolean shareSetter(MethodOrValue d) {
boolean shared = false;
if (isCaptured(d)) {
beginNewLine();
outerSelf(d);
out(".", names.setter(d), "=", names.setter(d), ";");
endLine();
shared = true;
}
return shared;
}
@Override
public void visit(AttributeDeclaration that) {
Value d = that.getDeclarationModel();
//Check if the attribute corresponds to a class parameter
//This is because of the new initializer syntax
String classParam = null;
if (d.getContainer() instanceof Functional) {
classParam = names.name(((Functional)d.getContainer()).getParameter(d.getName()));
}
if (!d.isFormal()) {
comment(that);
SpecifierOrInitializerExpression specInitExpr =
that.getSpecifierOrInitializerExpression();
if (prototypeStyle && d.isClassOrInterfaceMember()) {
if ((specInitExpr != null)
&& !(specInitExpr instanceof LazySpecifierExpression)) {
outerSelf(d);
out(".", names.name(d), "=");
super.visit(that);
endLine(true);
} else if (classParam != null) {
outerSelf(d);
out(".", names.name(d), "=", classParam);
endLine(true);
}
//TODO generate for => expr when no classParam is available
}
else if (specInitExpr instanceof LazySpecifierExpression) {
final boolean property = defineAsProperty(d);
if (property) {
out(clAlias, "defineAttr(");
outerSelf(d);
out(",'", names.name(d), "',function(){ return ");
} else {
out("var ", names.getter(d), "=function(){return ");
}
int boxType = boxStart(specInitExpr.getExpression().getTerm());
specInitExpr.getExpression().visit(this);
boxUnboxEnd(boxType);
out(";}");
if (property) {
out(");");
endLine();
} else {
endLine(true);
shareGetter(d);
}
}
else {
if ((specInitExpr != null) || (classParam != null) || !d.isMember()
|| d.isVariable()) {
generateAttributeGetter(d, specInitExpr, classParam);
}
if (d.isVariable() && !defineAsProperty(d)) {
final String varName = names.name(d);
String paramVarName = names.createTempVariable(d.getName());
out("var ", names.setter(d), "=function(", paramVarName, "){return ");
out(varName, "=", paramVarName, ";};");
endLine();
shareSetter(d);
}
}
}
}
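    /* Descriptive note (inferred from the code below): declares the backing variable
     * for an attribute, initialized from its specifier expression or from the
     * matching class parameter, and then exposes it either as a defineAttr property
     * (for captured members) or through a plain getter function; non-captured
     * values are recorded for direct access instead. */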
private void generateAttributeGetter(MethodOrValue decl,
SpecifierOrInitializerExpression expr, String param) {
final String varName = names.name(decl);
out("var ", varName);
if (expr != null) {
out("=");
int boxType = boxStart(expr.getExpression().getTerm());
if (dynblock > 0 && TypeUtils.isUnknown(expr.getExpression().getTypeModel()) && !TypeUtils.isUnknown(decl.getType())) {
TypeUtils.generateDynamicCheck(expr.getExpression(), decl.getType(), this);
} else {
expr.visit(this);
}
boxUnboxEnd(boxType);
} else if (param != null) {
out("=", param);
}
endLine(true);
if (decl instanceof Method) {
if (decl.isClassOrInterfaceMember() && isCaptured(decl)) {
beginNewLine();
outerSelf(decl);
out(".", names.name(decl), "=", names.name(decl), ";");
endLine();
}
} else {
if (isCaptured(decl)) {
if (defineAsProperty(decl)) {
out(clAlias, "defineAttr(");
outerSelf(decl);
out(",'", varName, "',function(){return ", varName, ";}");
if (decl.isVariable()) {
final String par = names.createTempVariable(decl.getName());
out(",function(", par, "){return ", varName, "=", par, ";}");
}
out(");");
endLine();
}
else {
if (decl.isMember()) {
out("delete ");
outerSelf(decl);
out(".", varName);
endLine(true);
}
out("var ", names.getter(decl),"=function(){return ", varName, ";};");
endLine();
shareGetter(decl);
}
} else {
directAccess.add(decl);
}
}
}
private void addGetterAndSetterToPrototype(TypeDeclaration outer,
AttributeDeclaration that) {
Value d = that.getDeclarationModel();
if (!prototypeStyle||d.isToplevel()) return;
if (!d.isFormal()) {
comment(that);
String classParam = null;
if (d.getContainer() instanceof Functional) {
classParam = names.name(((Functional)d.getContainer()).getParameter(d.getName()));
}
if ((that.getSpecifierOrInitializerExpression() != null) || d.isVariable()
|| (classParam != null)) {
if (that.getSpecifierOrInitializerExpression()
instanceof LazySpecifierExpression) {
// attribute is defined by a lazy expression ("=>" syntax)
out(clAlias, "defineAttr(", names.self(outer), ",'", names.name(d),
"',function()");
beginBlock();
initSelf(that.getScope());
out("return ");
Expression expr = that.getSpecifierOrInitializerExpression().getExpression();
int boxType = boxStart(expr.getTerm());
expr.visit(this);
boxUnboxEnd(boxType);
endBlock();
out(")");
endLine(true);
}
else if (d.isActual()) {
out("delete ", names.self(outer), ".", names.name(d));
endLine(true);
}
}
}
}
@Override
public void visit(CharLiteral that) {
out(clAlias, "Character(");
out(String.valueOf(that.getText().codePointAt(1)));
out(")");
}
/** Escapes a StringLiteral (needs to be quoted). */
String escapeStringLiteral(String s) {
StringBuilder text = new StringBuilder(s);
//Escape special chars
for (int i=0; i < text.length();i++) {
switch(text.charAt(i)) {
case 8:text.replace(i, i+1, "\\b"); i++; break;
case 9:text.replace(i, i+1, "\\t"); i++; break;
case 10:text.replace(i, i+1, "\\n"); i++; break;
case 12:text.replace(i, i+1, "\\f"); i++; break;
case 13:text.replace(i, i+1, "\\r"); i++; break;
case 34:text.replace(i, i+1, "\\\""); i++; break;
case 39:text.replace(i, i+1, "\\'"); i++; break;
case 92:text.replace(i, i+1, "\\\\"); i++; break;
}
}
return text.toString();
}
@Override
public void visit(StringLiteral that) {
final int slen = that.getText().codePointCount(0, that.getText().length());
if (JsCompiler.compilingLanguageModule) {
out("String$(\"", escapeStringLiteral(that.getText()), "\",", Integer.toString(slen), ")");
} else {
out(clAlias, "String(\"", escapeStringLiteral(that.getText()), "\",", Integer.toString(slen), ")");
}
}
@Override
public void visit(StringTemplate that) {
List<StringLiteral> literals = that.getStringLiterals();
List<Expression> exprs = that.getExpressions();
out(clAlias, "StringBuilder().appendAll([");
boolean first = true;
for (int i = 0; i < literals.size(); i++) {
StringLiteral literal = literals.get(i);
if (!literal.getText().isEmpty()) {
if (!first) { out(","); }
first = false;
literal.visit(this);
}
if (i < exprs.size()) {
if (!first) { out(","); }
first = false;
exprs.get(i).visit(this);
out(".string");
}
}
out("]).string");
}
@Override
public void visit(FloatLiteral that) {
out(clAlias, "Float(", that.getText(), ")");
}
@Override
public void visit(NaturalLiteral that) {
char prefix = that.getText().charAt(0);
if (prefix == '$' || prefix == '#') {
int radix= prefix == '$' ? 2 : 16;
try {
out("(", new java.math.BigInteger(that.getText().substring(1), radix).toString(), ")");
} catch (NumberFormatException ex) {
that.addError("Invalid numeric literal " + that.getText());
}
} else {
out("(", that.getText(), ")");
}
}
@Override
public void visit(This that) {
self(Util.getContainingClassOrInterface(that.getScope()));
}
@Override
public void visit(Super that) {
self(Util.getContainingClassOrInterface(that.getScope()));
}
@Override
public void visit(Outer that) {
if (prototypeStyle) {
Scope scope = that.getScope();
while ((scope != null) && !(scope instanceof TypeDeclaration)) {
scope = scope.getContainer();
}
if (scope != null && ((TypeDeclaration)scope).isClassOrInterfaceMember()) {
self((TypeDeclaration) scope);
out(".");
}
}
self(that.getTypeModel().getDeclaration());
}
@Override
public void visit(BaseMemberExpression that) {
if (that.getErrors() != null && !that.getErrors().isEmpty()) {
//Don't even bother processing a node with errors
return;
}
Declaration decl = that.getDeclaration();
if (decl != null) {
String name = decl.getName();
String pkgName = decl.getUnit().getPackage().getQualifiedNameString();
// map Ceylon true/false/null directly to JS true/false/null
if ("ceylon.language".equals(pkgName)) {
if ("true".equals(name) || "false".equals(name) || "null".equals(name)) {
out(name);
return;
}
}
}
out(memberAccess(that, null));
}
private boolean accessDirectly(Declaration d) {
return !accessThroughGetter(d) || directAccess.contains(d);
}
private boolean accessThroughGetter(Declaration d) {
return (d instanceof MethodOrValue) && !(d instanceof Method)
&& !defineAsProperty(d);
}
private boolean defineAsProperty(Declaration d) {
// for now, only define member attributes as properties, not toplevel attributes
return d.isMember() && (d instanceof MethodOrValue) && !(d instanceof Method);
}
/** Returns true if the top-level declaration for the term is annotated "nativejs" */
private static boolean isNative(Term t) {
if (t instanceof MemberOrTypeExpression) {
return isNative(((MemberOrTypeExpression)t).getDeclaration());
}
return false;
}
/** Returns true if the declaration is annotated "nativejs" */
private static boolean isNative(Declaration d) {
return hasAnnotationByName(getToplevel(d), "nativejs") || TypeUtils.isUnknown(d);
}
private static Declaration getToplevel(Declaration d) {
while (d != null && !d.isToplevel()) {
Scope s = d.getContainer();
// Skip any non-declaration elements
while (s != null && !(s instanceof Declaration)) {
s = s.getContainer();
}
d = (Declaration) s;
}
return d;
}
private static boolean hasAnnotationByName(Declaration d, String name){
if (d != null) {
for(com.redhat.ceylon.compiler.typechecker.model.Annotation annotation : d.getAnnotations()){
if(annotation.getName().equals(name))
return true;
}
}
return false;
}
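    /* Note (inferred from the code below; not authoritative): implements the "?."
     * safe member operator by evaluating the primary once into a retained temp
     * variable and yielding null when it is null. Rough shape of the generated JS:
     *   (tmp=primary,tmp!==null?tmp.member:null)
     * with an additional JsCallable(...) wrapper when the member is a method. */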
private void generateSafeOp(QualifiedMemberOrTypeExpression that) {
boolean isMethod = that.getDeclaration() instanceof Method;
String lhsVar = createRetainedTempVar("opt");
out("(", lhsVar, "=");
super.visit(that);
out(",");
if (isMethod) {
out(clAlias, "JsCallable(", lhsVar, ",");
}
out(lhsVar, "!==null?", memberAccess(that, lhsVar), ":null)");
if (isMethod) {
out(")");
}
}
@Override
public void visit(final QualifiedMemberExpression that) {
//Big TODO: make sure the member is actually
// refined by the current class!
if (that.getMemberOperator() instanceof SafeMemberOp) {
generateSafeOp(that);
} else if (that.getMemberOperator() instanceof SpreadOp) {
generateSpread(that);
} else if (that.getDeclaration() instanceof Method && that.getSignature() == null) {
//TODO right now this causes that all method invocations are done this way
//we need to filter somehow to only use this pattern when the result is supposed to be a callable
//looks like checking for signature is a good way (not THE way though; named arg calls don't have signature)
generateCallable(that, null);
} else {
final String lhs = generateToString(new GenerateCallback() {
@Override public void generateValue() {
GenerateJsVisitor.super.visit(that);
}
});
out(memberAccess(that, lhs));
}
}
    /** SpreadOp cannot be generated as a simple function call because we need to reference the object members directly, so an immediately-invoked function is generated instead. */
private void generateSpread(QualifiedMemberOrTypeExpression that) {
//Determine if it's a method or attribute
boolean isMethod = that.getDeclaration() instanceof Method;
//Define a function
out("(function()");
beginBlock();
if (comment) {
out("//SpreadOp at ", that.getLocation());
endLine();
}
//Declare an array to store the values/references
String tmplist = names.createTempVariable("lst");
out("var ", tmplist, "=[];"); endLine();
//Get an iterator
String iter = names.createTempVariable("it");
out("var ", iter, "=");
super.visit(that);
out(".iterator;"); endLine();
//Iterate
String elem = names.createTempVariable("elem");
out("var ", elem, ";"); endLine();
out("while ((", elem, "=", iter, ".next())!==", clAlias, "getFinished())");
beginBlock();
//Add value or reference to the array
out(tmplist, ".push(");
if (isMethod) {
out("{o:", elem, ", f:", memberAccess(that, elem), "}");
} else {
out(memberAccess(that, elem));
}
out(");");
endBlockNewLine();
//Gather arguments to pass to the callable
//Return the array of values or a Callable with the arguments
out("return ", clAlias);
if (isMethod) {
out("JsCallableList(", tmplist, ");");
} else {
out("ArraySequence(", tmplist, ");");
}
endBlock();
out("())");
}
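    /* Descriptive note (inferred from the code below): wraps a qualified member
     * reference in a JsCallable so it can be passed around as a function value;
     * when the primary evaluates to null, the callable's target is null as well. */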
private void generateCallable(QualifiedMemberOrTypeExpression that, String name) {
String primaryVar = createRetainedTempVar("opt");
out("(", primaryVar, "=");
that.getPrimary().visit(this);
out(",", clAlias, "JsCallable(", primaryVar, ",", primaryVar, "!==null?",
(name == null) ? memberAccess(that, primaryVar) : (primaryVar+"."+name), ":null))");
}
/**
* Checks if the given node is a MemberOrTypeExpression or QualifiedType which
* represents an access to a supertype member and returns the scope of that
* member or null.
*/
Scope getSuperMemberScope(Node node) {
Scope scope = null;
if (node instanceof BaseMemberOrTypeExpression) {
// Check for "Supertype::member"
BaseMemberOrTypeExpression bmte = (BaseMemberOrTypeExpression) node;
if (bmte.getSupertypeQualifier() != null) {
scope = bmte.getDeclaration().getContainer();
}
}
else if (node instanceof QualifiedMemberOrTypeExpression) {
// Check for "super.member"
QualifiedMemberOrTypeExpression qmte = (QualifiedMemberOrTypeExpression) node;
if (qmte.getPrimary() instanceof Super) {
scope = qmte.getDeclaration().getContainer();
}
}
else if (node instanceof QualifiedType) {
// Check for super.Membertype
QualifiedType qtype = (QualifiedType) node;
if (qtype.getOuterType() instanceof SuperType) {
scope = qtype.getDeclarationModel().getContainer();
}
}
return scope;
}
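    /* Note (inferred from the code below; not authoritative): builds the access path
     * plus the mangled member name. It handles an explicit lhs ("lhs.member"), the
     * qualified path of a base expression, and prototype-style super access through
     * getT$all()['qualified.name'] (either as a defineAttr property accessor or via
     * .$$.prototype); reserved type names get a trailing "$" appended. */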
private String memberAccessBase(Node node, Declaration decl, boolean setter,
String lhs) {
StringBuilder sb = new StringBuilder();
if (lhs != null) {
if (lhs.length() > 0) {
sb.append(lhs).append(".");
}
}
else if (node instanceof BaseMemberOrTypeExpression) {
BaseMemberOrTypeExpression bmte = (BaseMemberOrTypeExpression) node;
String path = qualifiedPath(node, bmte.getDeclaration());
if (path.length() > 0) {
sb.append(path);
sb.append(".");
}
}
Scope scope = getSuperMemberScope(node);
if (prototypeStyle && (scope != null)) {
sb.append("getT$all()['");
sb.append(scope.getQualifiedNameString());
sb.append("']");
if (defineAsProperty(decl)) {
return clAlias + (setter ? "attrSetter(" : "attrGetter(")
+ sb.toString() + ",'" + names.name(decl) + "')";
}
sb.append(".$$.prototype.");
}
final String member = (accessThroughGetter(decl) && !accessDirectly(decl))
? (setter ? names.setter(decl) : names.getter(decl)) : names.name(decl);
sb.append(member);
if (!prototypeStyle && (scope != null)) {
sb.append(names.scopeSuffix(scope));
}
//When compiling the language module we need to modify certain base type names
String rval = sb.toString();
if (TypeUtils.isReservedTypename(rval)) {
rval = sb.append("$").toString();
}
return rval;
}
    /**
     * Returns a string representing a read access to a member, as represented by
     * the given expression. If the expression is a QualifiedMemberOrTypeExpression
     * then the LHS is *not* included. If it is a BaseMemberOrTypeExpression and no
     * explicit LHS is passed, the qualified path is included.
     */
private String memberAccess(StaticMemberOrTypeExpression expr, String lhs) {
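        // Returns, for example (sketch): "lhs.<name>" for direct access, "lhs.<getter>()" for access
        // through a getter ("....call(this)" when going through a supertype prototype), or the plain
        // native/dynamic identifier; the actual identifiers come from JsIdentifierNames.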
Declaration decl = expr.getDeclaration();
String plainName = null;
if (decl == null && dynblock > 0) {
plainName = expr.getIdentifier().getText();
}
else if (isNative(decl)) {
// direct access to a native element
plainName = decl.getName();
}
if (plainName != null) {
return ((lhs != null) && (lhs.length() > 0))
? (lhs + "." + plainName) : plainName;
}
boolean protoCall = prototypeStyle && (getSuperMemberScope(expr) != null);
if (accessDirectly(decl) && !(protoCall && defineAsProperty(decl))) {
// direct access, without getter
return memberAccessBase(expr, decl, false, lhs);
}
// access through getter
return memberAccessBase(expr, decl, false, lhs)
+ (protoCall ? ".call(this)" : "()");
}
    /**
     * Generates a write access to a member, as represented by the given expression.
     * The given callback is responsible for generating the assigned value.
     * If the expression is a QualifiedMemberOrTypeExpression then the
     * LHS is *not* included. If it is a BaseMemberOrTypeExpression and no
     * explicit LHS is passed, the qualified path is included.
     */
private void generateMemberAccess(StaticMemberOrTypeExpression expr,
GenerateCallback callback, String lhs) {
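        // Emits, roughly (sketch): "<target>=" for direct access, or "<setter>(" / "<setter>.call(this,"
        // for access through a setter; the assigned value is then produced by the callback and the
        // parenthesis is closed when needed.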
Declaration decl = expr.getDeclaration();
boolean paren = false;
String plainName = null;
if (decl == null && dynblock > 0) {
plainName = expr.getIdentifier().getText();
} else if (isNative(decl)) {
// direct access to a native element
plainName = decl.getName();
}
if (plainName != null) {
if ((lhs != null) && (lhs.length() > 0)) {
out(lhs, ".");
}
out(plainName, "=");
}
else {
boolean protoCall = prototypeStyle && (getSuperMemberScope(expr) != null);
if (accessDirectly(decl) && !(protoCall && defineAsProperty(decl))) {
// direct access, without setter
out(memberAccessBase(expr, decl, true, lhs), "=");
}
else {
// access through setter
out(memberAccessBase(expr, decl, true, lhs),
protoCall ? ".call(this," : "(");
paren = true;
}
}
callback.generateValue();
if (paren) { out(")"); }
}
private void generateMemberAccess(final StaticMemberOrTypeExpression expr,
final String strValue, final String lhs) {
generateMemberAccess(expr, new GenerateCallback() {
@Override public void generateValue() { out(strValue); }
}, lhs);
}
@Override
public void visit(BaseTypeExpression that) {
if (that.getErrors() != null && !that.getErrors().isEmpty()) return;
qualify(that, that.getDeclaration());
out(names.name(that.getDeclaration()));
}
@Override
public void visit(QualifiedTypeExpression that) {
if (that.getMemberOperator() instanceof SafeMemberOp) {
generateCallable(that, names.name(that.getDeclaration()));
} else {
super.visit(that);
out(".", names.name(that.getDeclaration()));
}
}
public void visit(Dynamic that) {
        //a dynamic object instantiation of the form value{...}, generated as a native JS object
invoker.nativeObject(that.getNamedArgumentList());
}
@Override
public void visit(InvocationExpression that) {
invoker.generateInvocation(that);
}
@Override
public void visit(PositionalArgumentList that) {
invoker.generatePositionalArguments(that, that.getPositionalArguments(), false);
}
/** Box a term, visit it, unbox it. */
private void box(Term term) {
final int t = boxStart(term);
term.visit(this);
boxUnboxEnd(t);
}
    // Box fromTerm into its Ceylon counterpart when it is a native value
private int boxStart(Term fromTerm) {
boolean fromNative = isNative(fromTerm);
boolean toNative = false;
ProducedType fromType = fromTerm.getTypeModel();
return boxUnboxStart(fromNative, fromType, toNative);
}
// Make sure fromTerm is compatible with toTerm by boxing or unboxing it when necessary
int boxUnboxStart(Term fromTerm, Term toTerm) {
boolean fromNative = isNative(fromTerm);
boolean toNative = isNative(toTerm);
ProducedType fromType = fromTerm.getTypeModel();
return boxUnboxStart(fromNative, fromType, toNative);
}
// Make sure fromTerm is compatible with toDecl by boxing or unboxing it when necessary
int boxUnboxStart(Term fromTerm, com.redhat.ceylon.compiler.typechecker.model.TypedDeclaration toDecl) {
boolean fromNative = isNative(fromTerm);
boolean toNative = isNative(toDecl);
ProducedType fromType = fromTerm.getTypeModel();
return boxUnboxStart(fromNative, fromType, toNative);
}
int boxUnboxStart(boolean fromNative, ProducedType fromType, boolean toNative) {
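        // Return codes (consumed by boxUnboxEnd): 0 = no conversion needed, 1 = a boxing call was
        // opened and must be closed with ")", 2 = unbox a Ceylon String/Float via ".valueOf()",
        // 3 = Ceylon-to-native conversion that requires no additional output.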
if (fromNative != toNative) {
// Box the value
String fromTypeName = TypeUtils.isUnknown(fromType) ? "UNKNOWN" : fromType.getProducedTypeQualifiedName();
if (fromNative) {
// conversion from native value to Ceylon value
if (fromTypeName.equals("ceylon.language::String")) {
if (JsCompiler.compilingLanguageModule) {
out("String$(");
} else {
out(clAlias, "String(");
}
} else if (fromTypeName.equals("ceylon.language::Integer")) {
out("(");
} else if (fromTypeName.equals("ceylon.language::Float")) {
out(clAlias, "Float(");
} else if (fromTypeName.equals("ceylon.language::Boolean")) {
out("(");
} else if (fromTypeName.equals("ceylon.language::Character")) {
out(clAlias, "Character(");
} else {
return 0;
}
return 1;
} else if ("ceylon.language::String".equals(fromTypeName)
|| "ceylon.language::Float".equals(fromTypeName)) {
// conversion from Ceylon String or Float to native value
return 2;
} else {
return 3;
}
}
return 0;
}
void boxUnboxEnd(int boxType) {
switch (boxType) {
case 1: out(")"); break;
case 2: out(".valueOf()"); break;
default: //nothing
}
}
@Override
public void visit(ObjectArgument that) {
//Don't even bother with nodes that have errors
if (that.getErrors() != null && !that.getErrors().isEmpty()) return;
final Class c = (Class)that.getDeclarationModel().getTypeDeclaration();
out("(function()");
beginBlock();
out("//ObjectArgument ", that.getIdentifier().getText());
location(that);
endLine();
out(function, names.name(c), "()");
beginBlock();
instantiateSelf(c);
referenceOuter(c);
ExtendedType xt = that.getExtendedType();
final ClassBody body = that.getClassBody();
SatisfiedTypes sts = that.getSatisfiedTypes();
final List<Declaration> superDecs = new ArrayList<Declaration>();
if (!prototypeStyle) {
new SuperVisitor(superDecs).visit(that.getClassBody());
}
callSuperclass(xt, c, that, superDecs);
callInterfaces(sts, c, that, superDecs);
body.visit(this);
returnSelf(c);
indentLevel--;
endLine();
out("}");
endLine();
typeInitialization(xt, sts, false, c, new PrototypeInitCallback() {
@Override
public void addToPrototypeCallback() {
addToPrototype(c, body.getStatements());
}
});
out("return ", names.name(c), "(new ", names.name(c), ".$$);");
endBlock();
out("())");
}
@Override
public void visit(AttributeArgument that) {
out("(function()");
beginBlock();
out("//AttributeArgument ", that.getParameter().getName());
location(that);
endLine();
Block block = that.getBlock();
SpecifierExpression specExpr = that.getSpecifierExpression();
if (specExpr != null) {
out("return ");
specExpr.getExpression().visit(this);
out(";");
}
else if (block != null) {
visitStatements(block.getStatements());
}
endBlock();
out("())");
}
@Override
public void visit(SequencedArgument that) {
List<PositionalArgument> positionalArguments = that.getPositionalArguments();
        boolean spread = !positionalArguments.isEmpty()
                && !(positionalArguments.get(positionalArguments.size()-1) instanceof Tree.ListedArgument);
if (!spread) { out("["); }
boolean first=true;
for (PositionalArgument arg: positionalArguments) {
if (!first) out(",");
            if (arg instanceof Tree.ListedArgument) {
                ((Tree.ListedArgument) arg).getExpression().visit(this);
            } else if (arg instanceof Tree.SpreadArgument) {
                ((Tree.SpreadArgument) arg).getExpression().visit(this);
            } else { // comprehension
                arg.visit(this);
            }
first = false;
}
if (!spread) { out("]"); }
}
@Override
public void visit(SequenceEnumeration that) {
SequencedArgument sarg = that.getSequencedArgument();
if (sarg == null) {
out(clAlias, "getEmpty()");
} else {
List<PositionalArgument> positionalArguments = sarg.getPositionalArguments();
int lim = positionalArguments.size()-1;
            boolean spread = !positionalArguments.isEmpty()
                    && !(positionalArguments.get(positionalArguments.size()-1) instanceof Tree.ListedArgument);
int count=0;
ProducedType chainedType = null;
if (lim>0 || !spread) {
out("[");
}
for (PositionalArgument expr : positionalArguments) {
if (count==lim && spread) {
if (lim > 0) {
ProducedType seqType = TypeUtils.findSupertype(types.iterable, that.getTypeModel());
closeSequenceWithReifiedType(that, seqType.getTypeArguments());
out(".chain(");
chainedType = TypeUtils.findSupertype(types.iterable, expr.getTypeModel());
}
count--;
} else {
if (count > 0) {
out(",");
}
}
if (dynblock > 0 && expr instanceof ListedArgument && TypeUtils.isUnknown(expr.getTypeModel())) {
TypeUtils.generateDynamicCheck(((ListedArgument)expr).getExpression(), types.anything.getType(), this);
} else {
expr.visit(this);
}
count++;
}
if (chainedType == null) {
if (!spread) {
closeSequenceWithReifiedType(that, that.getTypeModel().getTypeArguments());
}
} else {
out(",");
TypeUtils.printTypeArguments(that, chainedType.getTypeArguments(), this);
out(")");
}
}
}
@Override
public void visit(Comprehension that) {
new ComprehensionGenerator(this, names, directAccess).generateComprehension(that);
}
@Override
public void visit(final SpecifierStatement that) {
// A lazy specifier expression in a class/interface should go into the
// prototype in prototype style, so don't generate them here.
if (!(prototypeStyle && (that.getSpecifierExpression() instanceof LazySpecifierExpression)
&& (that.getScope().getContainer() instanceof TypeDeclaration))) {
specifierStatement(null, that);
}
}
private void specifierStatement(final TypeDeclaration outer,
final SpecifierStatement specStmt) {
if (specStmt.getBaseMemberExpression() instanceof BaseMemberExpression) {
BaseMemberExpression bme = (BaseMemberExpression) specStmt.getBaseMemberExpression();
Declaration bmeDecl = bme.getDeclaration();
if (specStmt.getSpecifierExpression() instanceof LazySpecifierExpression) {
// attr => expr;
final boolean property = defineAsProperty(bmeDecl);
if (property) {
out(clAlias, "defineAttr(", qualifiedPath(specStmt, bmeDecl), ",'",
names.name(bmeDecl), "',function()");
} else {
if (bmeDecl.isMember()) {
qualify(specStmt, bmeDecl);
} else {
out ("var ");
}
out(names.getter(bmeDecl), "=function()");
}
beginBlock();
if (outer != null) { initSelf(specStmt.getScope()); }
out ("return ");
specStmt.getSpecifierExpression().visit(this);
out(";");
endBlock();
if (property) { out(")"); }
endLine(true);
directAccess.remove(bmeDecl);
}
else if (outer != null) {
// "attr = expr;" in a prototype definition
if (bmeDecl.isMember() && (bmeDecl instanceof Value) && bmeDecl.isActual()) {
out("delete ", names.self(outer), ".", names.name(bmeDecl));
endLine(true);
}
}
else if (bmeDecl instanceof MethodOrValue) {
// "attr = expr;" in an initializer or method
final MethodOrValue moval = (MethodOrValue)bmeDecl;
if (moval.isVariable()) {
// simple assignment to a variable attribute
generateMemberAccess(bme, new GenerateCallback() {
@Override public void generateValue() {
int boxType = boxUnboxStart(specStmt.getSpecifierExpression().getExpression().getTerm(),
moval);
if (dynblock > 0 && !TypeUtils.isUnknown(moval.getType())
&& TypeUtils.isUnknown(specStmt.getSpecifierExpression().getExpression().getTypeModel())) {
TypeUtils.generateDynamicCheck(specStmt.getSpecifierExpression().getExpression(),
moval.getType(), GenerateJsVisitor.this);
} else {
specStmt.getSpecifierExpression().getExpression().visit(GenerateJsVisitor.this);
}
boxUnboxEnd(boxType);
}
}, null);
out(";");
} else if (moval.isMember()) {
// Specifier for a member attribute. This actually defines the
// member (e.g. in shortcut refinement syntax the attribute
// declaration itself can be omitted), so generate the attribute.
generateAttributeGetter(moval,
specStmt.getSpecifierExpression(), null);
} else {
// Specifier for some other attribute, or for a method.
if (prototypeStyle
|| (bmeDecl.isMember() && (bmeDecl instanceof Method))) {
qualify(specStmt, bmeDecl);
}
out(names.name(bmeDecl), "=");
if (dynblock > 0 && TypeUtils.isUnknown(specStmt.getSpecifierExpression().getExpression().getTypeModel())) {
TypeUtils.generateDynamicCheck(specStmt.getSpecifierExpression().getExpression(),
bme.getTypeModel(), this);
} else {
specStmt.getSpecifierExpression().visit(this);
}
out(";");
}
}
}
else if ((specStmt.getBaseMemberExpression() instanceof ParameterizedExpression)
&& (specStmt.getSpecifierExpression() != null)) {
final ParameterizedExpression paramExpr =
(ParameterizedExpression) specStmt.getBaseMemberExpression();
if (paramExpr.getPrimary() instanceof BaseMemberExpression) {
// func(params) => expr;
BaseMemberExpression bme = (BaseMemberExpression) paramExpr.getPrimary();
Declaration bmeDecl = bme.getDeclaration();
if (bmeDecl.isMember()) {
qualify(specStmt, bmeDecl);
} else {
out("var ");
}
out(names.name(bmeDecl), "=");
singleExprFunction(paramExpr.getParameterLists(),
specStmt.getSpecifierExpression().getExpression(),
specStmt.getScope());
out(";");
}
}
}
private void addSpecifierToPrototype(final TypeDeclaration outer,
final SpecifierStatement specStmt) {
specifierStatement(outer, specStmt);
}
@Override
public void visit(final AssignOp that) {
String returnValue = null;
StaticMemberOrTypeExpression lhsExpr = null;
if (dynblock > 0 && TypeUtils.isUnknown(that.getLeftTerm().getTypeModel())) {
that.getLeftTerm().visit(this);
out("=");
that.getRightTerm().visit(this);
return;
}
out("(");
if (that.getLeftTerm() instanceof BaseMemberExpression) {
BaseMemberExpression bme = (BaseMemberExpression) that.getLeftTerm();
lhsExpr = bme;
Declaration bmeDecl = bme.getDeclaration();
boolean simpleSetter = hasSimpleGetterSetter(bmeDecl);
if (!simpleSetter) {
returnValue = memberAccess(bme, null);
}
} else if (that.getLeftTerm() instanceof QualifiedMemberExpression) {
QualifiedMemberExpression qme = (QualifiedMemberExpression)that.getLeftTerm();
lhsExpr = qme;
boolean simpleSetter = hasSimpleGetterSetter(qme.getDeclaration());
String lhsVar = null;
if (!simpleSetter) {
lhsVar = createRetainedTempVar();
out(lhsVar, "=");
super.visit(qme);
out(",", lhsVar, ".");
returnValue = memberAccess(qme, lhsVar);
} else {
super.visit(qme);
out(".");
}
}
generateMemberAccess(lhsExpr, new GenerateCallback() {
@Override public void generateValue() {
int boxType = boxUnboxStart(that.getRightTerm(), that.getLeftTerm());
that.getRightTerm().visit(GenerateJsVisitor.this);
boxUnboxEnd(boxType);
}
}, null);
if (returnValue != null) { out(",", returnValue); }
out(")");
}
    /** Outputs the qualified path for the specified declaration, followed by a dot, unless the
     * declaration belongs to the default module. Returns true if something was output. */
boolean qualify(Node that, Declaration d) {
if (d.getUnit().getPackage().getModule().isDefault()) {
return false;
}
String path = qualifiedPath(that, d);
if (path.length() > 0) {
out(path, ".");
}
return path.length() > 0;
}
private String qualifiedPath(Node that, Declaration d) {
return qualifiedPath(that, d, false);
}
private String qualifiedPath(Node that, Declaration d, boolean inProto) {
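        // Computes the JS prefix needed to reach the declaration: the module alias for imported toplevel
        // declarations, a chain of "self" variables for members in prototype style, the container's (or
        // inheriting type's) "self" variable for shared members, or "" when no qualification is needed.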
boolean isMember = d.isClassOrInterfaceMember();
if (!isMember && isImported(that, d)) {
return names.moduleAlias(d.getUnit().getPackage().getModule());
}
else if (prototypeStyle && !inProto) {
if (isMember && !(d instanceof com.redhat.ceylon.compiler.typechecker.model.Parameter
&& !d.isCaptured())) {
TypeDeclaration id = that.getScope().getInheritingDeclaration(d);
if (id == null) {
//a local declaration of some kind,
//perhaps in an outer scope
id = (TypeDeclaration) d.getContainer();
} //else {
//an inherited declaration that might be
//inherited by an outer scope
//}
String path = "";
Scope scope = that.getScope();
// if (inProto) {
// while ((scope != null) && (scope instanceof TypeDeclaration)) {
// scope = scope.getContainer();
// }
// }
if ((scope != null) && ((that instanceof ClassDeclaration)
|| (that instanceof InterfaceDeclaration))) {
// class/interface aliases have no own "this"
scope = scope.getContainer();
}
while (scope != null) {
if (scope instanceof TypeDeclaration) {
if (path.length() > 0) {
path += '.';
}
path += names.self((TypeDeclaration) scope);
} else {
path = "";
}
if (scope == id) {
break;
}
scope = scope.getContainer();
}
return path;
}
}
else if (d != null && (d.isShared() || inProto) && isMember) {
TypeDeclaration id = that.getScope().getInheritingDeclaration(d);
if (id==null) {
//a shared local declaration
return names.self((TypeDeclaration)d.getContainer());
}
else {
//an inherited declaration that might be
//inherited by an outer scope
return names.self(id);
}
}
return "";
}
    /** Tells whether a declaration belongs to a different package than the given node (i.e. whether it is imported). */
private boolean isImported(Node that, Declaration d) {
if (d == null) {
return false;
}
Package p1 = d.getUnit().getPackage();
Package p2 = that == null ? null : that.getUnit().getPackage();
return !p1.equals(p2);
}
@Override
public void visit(ExecutableStatement that) {
super.visit(that);
endLine(true);
}
/** Creates a new temporary variable which can be used immediately, even
* inside an expression. The declaration for that temporary variable will be
* emitted after the current Ceylon statement has been completely processed.
* The resulting code is valid because JavaScript variables may be used before
* they are declared. */
private String createRetainedTempVar(String baseName) {
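        // Example (sketch): the generated expression may read "(tmp$=...,...)" immediately, while
        // "var tmp$;" is emitted by RetainedVars once the enclosing statement has been processed.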
String varName = names.createTempVariable(baseName);
retainedVars.add(varName);
return varName;
}
private String createRetainedTempVar() {
return createRetainedTempVar("tmp");
}
// @Override
// public void visit(Expression that) {
// if (that.getTerm() instanceof QualifiedMemberOrTypeExpression) {
// QualifiedMemberOrTypeExpression term = (QualifiedMemberOrTypeExpression) that.getTerm();
// // References to methods of types from other packages always need
// // special treatment, even if prototypeStyle==false, because they
// // may have been generated in prototype style. In particular,
// // ceylon.language is always in prototype style.
// if ((term.getDeclaration() instanceof Functional)
// && (prototypeStyle || !declaredInThisPackage(term.getDeclaration()))) {
// if (term.getMemberOperator() instanceof SpreadOp) {
// generateSpread(term);
// } else {
// generateCallable(term, names.name(term.getDeclaration()));
// }
// return;
// }
// }
// super.visit(that);
// }
@Override
public void visit(Return that) {
out("return ");
super.visit(that);
}
@Override
public void visit(AnnotationList that) {}
void self(TypeDeclaration d) {
out(names.self(d));
}
/* * Output the name of a variable that receives the type parameter info, usually in the class constructor. * /
private void selfTypeParameters(TypeDeclaration d) {
out(selfTypeParametersString(d));
}
private String selfTypeParametersString(TypeDeclaration d) {
return "$$typeParms" + d.getName();
}*/
/*private void self() {
out("$$");
}*/
private boolean outerSelf(Declaration d) {
if (d.isToplevel()) {
out("exports");
return true;
}
else if (d.isClassOrInterfaceMember()) {
self((TypeDeclaration)d.getContainer());
return true;
}
return false;
}
private boolean declaredInCL(Declaration decl) {
return decl.getUnit().getPackage().getQualifiedNameString()
.startsWith("ceylon.language");
}
@Override
public void visit(SumOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
termgen.left();
out(".plus(");
termgen.right();
out(")");
}
});
}
@Override
public void visit(DifferenceOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
termgen.left();
out(".minus(");
termgen.right();
out(")");
}
});
}
@Override
public void visit(ProductOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
termgen.left();
out(".times(");
termgen.right();
out(")");
}
});
}
@Override
public void visit(QuotientOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
termgen.left();
out(".divided(");
termgen.right();
out(")");
}
});
}
@Override public void visit(RemainderOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
termgen.left();
out(".remainder(");
termgen.right();
out(")");
}
});
}
@Override public void visit(PowerOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
termgen.left();
out(".power(");
termgen.right();
out(")");
}
});
}
@Override public void visit(AddAssignOp that) {
arithmeticAssignOp(that, "plus");
}
@Override public void visit(SubtractAssignOp that) {
arithmeticAssignOp(that, "minus");
}
@Override public void visit(MultiplyAssignOp that) {
arithmeticAssignOp(that, "times");
}
@Override public void visit(DivideAssignOp that) {
arithmeticAssignOp(that, "divided");
}
@Override public void visit(RemainderAssignOp that) {
arithmeticAssignOp(that, "remainder");
}
private void arithmeticAssignOp(final ArithmeticAssignmentOp that,
final String functionName) {
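        // For example (sketch), "x += y" is emitted roughly as "(x=x.plus(y))" when x has a simple
        // getter/setter; otherwise the current value is appended so the expression still yields it,
        // e.g. "(<setX>(<getX>().plus(y)),<getX>())". Actual access strings come from memberAccess().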
Term lhs = that.getLeftTerm();
if (lhs instanceof BaseMemberExpression) {
BaseMemberExpression lhsBME = (BaseMemberExpression) lhs;
Declaration lhsDecl = lhsBME.getDeclaration();
final String getLHS = memberAccess(lhsBME, null);
out("(");
generateMemberAccess(lhsBME, new GenerateCallback() {
@Override public void generateValue() {
out(getLHS, ".", functionName, "(");
that.getRightTerm().visit(GenerateJsVisitor.this);
out(")");
}
}, null);
if (!hasSimpleGetterSetter(lhsDecl)) { out(",", getLHS); }
out(")");
} else if (lhs instanceof QualifiedMemberExpression) {
QualifiedMemberExpression lhsQME = (QualifiedMemberExpression) lhs;
if (isNative(lhsQME)) {
// ($1.foo = Box($1.foo).operator($2))
out("(");
lhsQME.getPrimary().visit(this);
out(".", lhsQME.getDeclaration().getName());
out("=");
int boxType = boxStart(lhsQME);
lhsQME.getPrimary().visit(this);
out(".", lhsQME.getDeclaration().getName());
boxUnboxEnd(boxType);
out(".", functionName, "(");
that.getRightTerm().visit(this);
out("))");
} else {
final String lhsPrimaryVar = createRetainedTempVar();
final String getLHS = memberAccess(lhsQME, lhsPrimaryVar);
out("(", lhsPrimaryVar, "=");
lhsQME.getPrimary().visit(this);
out(",");
generateMemberAccess(lhsQME, new GenerateCallback() {
@Override public void generateValue() {
out(getLHS, ".", functionName, "(");
that.getRightTerm().visit(GenerateJsVisitor.this);
out(")");
}
}, lhsPrimaryVar);
if (!hasSimpleGetterSetter(lhsQME.getDeclaration())) {
out(",", getLHS);
}
out(")");
}
}
}
@Override public void visit(final NegativeOp that) {
unaryOp(that, new UnaryOpGenerator() {
@Override
public void generate(UnaryOpTermGenerator termgen) {
TypeDeclaration d = that.getTerm().getTypeModel().getDeclaration();
if (d.inherits(types._integer)) {
out("(-");
termgen.term();
out(")");
//This is not really optimal yet, since it generates
//stuff like Float(-Float((5.1)))
/*} else if (d.inherits(types._float)) {
out(clAlias, "Float(-");
termgen.term();
out(")");*/
} else {
termgen.term();
out(".negativeValue");
}
}
});
}
@Override public void visit(final PositiveOp that) {
unaryOp(that, new UnaryOpGenerator() {
@Override
public void generate(UnaryOpTermGenerator termgen) {
TypeDeclaration d = that.getTerm().getTypeModel().getDeclaration();
if (d.inherits(types._integer) || d.inherits(types._float)) {
out("(+");
termgen.term();
out(")");
} else {
termgen.term();
out(".positiveValue");
}
}
});
}
@Override public void visit(EqualOp that) {
if (dynblock > 0 && TypeUtils.isUnknown(that.getLeftTerm().getTypeModel())) {
//Try to use equals() if it exists
String ltmp = names.createTempVariable();
String rtmp = names.createTempVariable();
out("(", ltmp, "=");
box(that.getLeftTerm());
out(",", rtmp, "=");
box(that.getRightTerm());
out(",(", ltmp, ".equals&&", ltmp, ".equals(", rtmp, "))||", ltmp, "===", rtmp, ")");
} else {
leftEqualsRight(that);
}
}
@Override public void visit(NotEqualOp that) {
if (dynblock > 0 && TypeUtils.isUnknown(that.getLeftTerm().getTypeModel())) {
//Try to use equals() if it exists
String ltmp = names.createTempVariable();
String rtmp = names.createTempVariable();
out("(", ltmp, "=");
box(that.getLeftTerm());
out(",", rtmp, "=");
box(that.getRightTerm());
out(",(", ltmp, ".equals&&!", ltmp, ".equals(", rtmp, "))||", ltmp, "!==", rtmp, ")");
} else {
out("(!");
leftEqualsRight(that);
out(")");
}
}
@Override public void visit(NotOp that) {
unaryOp(that, new UnaryOpGenerator() {
@Override
public void generate(UnaryOpTermGenerator termgen) {
out("(!");
termgen.term();
out(")");
}
});
}
@Override public void visit(IdenticalOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
out("(");
termgen.left();
out("===");
termgen.right();
out(")");
}
});
}
@Override public void visit(CompareOp that) {
leftCompareRight(that);
}
@Override public void visit(SmallerOp that) {
if (dynblock > 0 && TypeUtils.isUnknown(that.getLeftTerm().getTypeModel())) {
//Try to use compare() if it exists
String ltmp = names.createTempVariable();
String rtmp = names.createTempVariable();
out("(", ltmp, "=");
box(that.getLeftTerm());
out(",", rtmp, "=");
box(that.getRightTerm());
out(",(", ltmp, ".compare&&", ltmp, ".compare(", rtmp, ").equals(",
clAlias, "getSmaller()))||", ltmp, "<", rtmp, ")");
} else {
leftCompareRight(that);
out(".equals(", clAlias, "getSmaller())");
}
}
@Override public void visit(LargerOp that) {
if (dynblock > 0 && TypeUtils.isUnknown(that.getLeftTerm().getTypeModel())) {
//Try to use compare() if it exists
String ltmp = names.createTempVariable();
String rtmp = names.createTempVariable();
out("(", ltmp, "=");
box(that.getLeftTerm());
out(",", rtmp, "=");
box(that.getRightTerm());
out(",(", ltmp, ".compare&&", ltmp, ".compare(", rtmp, ").equals(",
clAlias, "getLarger()))||", ltmp, ">", rtmp, ")");
} else {
leftCompareRight(that);
out(".equals(", clAlias, "getLarger())");
}
}
@Override public void visit(SmallAsOp that) {
if (dynblock > 0 && TypeUtils.isUnknown(that.getLeftTerm().getTypeModel())) {
//Try to use compare() if it exists
String ltmp = names.createTempVariable();
String rtmp = names.createTempVariable();
out("(", ltmp, "=");
box(that.getLeftTerm());
out(",", rtmp, "=");
box(that.getRightTerm());
out(",(", ltmp, ".compare&&", ltmp, ".compare(", rtmp, "!==",
clAlias, "getLarger()))||", ltmp, "<=", rtmp, ")");
} else {
out("(");
leftCompareRight(that);
out("!==", clAlias, "getLarger()");
out(")");
}
}
@Override public void visit(LargeAsOp that) {
if (dynblock > 0 && TypeUtils.isUnknown(that.getLeftTerm().getTypeModel())) {
//Try to use compare() if it exists
String ltmp = names.createTempVariable();
String rtmp = names.createTempVariable();
out("(", ltmp, "=");
box(that.getLeftTerm());
out(",", rtmp, "=");
box(that.getRightTerm());
out(",(", ltmp, ".compare&&", ltmp, ".compare(", rtmp, "!==",
clAlias, "getSmaller()))||", ltmp, ">=", rtmp, ")");
} else {
out("(");
leftCompareRight(that);
out("!==", clAlias, "getSmaller()");
out(")");
}
}
    /** Outputs the JS equivalent of the Ceylon expression 'a==b'. */
private void leftEqualsRight(BinaryOperatorExpression that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
termgen.left();
out(".equals(");
termgen.right();
out(")");
}
});
}
interface UnaryOpTermGenerator {
void term();
}
interface UnaryOpGenerator {
void generate(UnaryOpTermGenerator termgen);
}
private void unaryOp(final UnaryOperatorExpression that, final UnaryOpGenerator gen) {
final GenerateJsVisitor visitor = this;
gen.generate(new UnaryOpTermGenerator() {
@Override
public void term() {
int boxTypeLeft = boxStart(that.getTerm());
that.getTerm().visit(visitor);
boxUnboxEnd(boxTypeLeft);
}
});
}
interface BinaryOpTermGenerator {
void left();
void right();
}
interface BinaryOpGenerator {
void generate(BinaryOpTermGenerator termgen);
}
private void binaryOp(final BinaryOperatorExpression that, final BinaryOpGenerator gen) {
gen.generate(new BinaryOpTermGenerator() {
@Override
public void left() {
box(that.getLeftTerm());
}
@Override
public void right() {
box(that.getRightTerm());
}
});
}
    /** Outputs the JS equivalent of the Ceylon expression 'a <=> b'. */
private void leftCompareRight(BinaryOperatorExpression that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
termgen.left();
out(".compare(");
termgen.right();
out(")");
}
});
}
@Override public void visit(AndOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
out("(");
termgen.left();
out("&&");
termgen.right();
out(")");
}
});
}
@Override public void visit(OrOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
out("(");
termgen.left();
out("||");
termgen.right();
out(")");
}
});
}
@Override public void visit(final EntryOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
out(clAlias, "Entry(");
termgen.left();
out(",");
termgen.right();
out(",");
TypeUtils.printTypeArguments(that, that.getTypeModel().getTypeArguments(),
GenerateJsVisitor.this);
out(")");
}
});
}
@Override public void visit(Element that) {
out(".get(");
that.getExpression().visit(this);
out(")");
}
@Override public void visit(DefaultOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
String lhsVar = createRetainedTempVar("opt");
out("(", lhsVar, "=");
termgen.left();
out(",", lhsVar, "!==null?", lhsVar, ":");
termgen.right();
out(")");
}
});
}
@Override public void visit(ThenOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
out("(");
termgen.left();
out("?");
termgen.right();
out(":null)");
}
});
}
@Override public void visit(IncrementOp that) {
prefixIncrementOrDecrement(that.getTerm(), "successor");
}
@Override public void visit(DecrementOp that) {
prefixIncrementOrDecrement(that.getTerm(), "predecessor");
}
private boolean hasSimpleGetterSetter(Declaration decl) {
return (dynblock > 0 && TypeUtils.isUnknown(decl)) ||
!((decl instanceof Getter) || (decl instanceof Setter) || decl.isFormal());
}
private void prefixIncrementOrDecrement(Term term, String functionName) {
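        // For example (sketch), "++x" on a simple attribute is emitted roughly as "(x=x.successor)";
        // when the attribute has a non-trivial getter/setter the current value is appended, e.g.
        // "(<setX>(<getX>().successor),<getX>())".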
if (term instanceof BaseMemberExpression) {
BaseMemberExpression bme = (BaseMemberExpression) term;
boolean simpleSetter = hasSimpleGetterSetter(bme.getDeclaration());
String getMember = memberAccess(bme, null);
String applyFunc = String.format("%s.%s", getMember, functionName);
out("(");
generateMemberAccess(bme, applyFunc, null);
if (!simpleSetter) { out(",", getMember); }
out(")");
} else if (term instanceof QualifiedMemberExpression) {
QualifiedMemberExpression qme = (QualifiedMemberExpression) term;
String primaryVar = createRetainedTempVar();
String getMember = memberAccess(qme, primaryVar);
String applyFunc = String.format("%s.%s", getMember, functionName);
out("(", primaryVar, "=");
qme.getPrimary().visit(this);
out(",");
generateMemberAccess(qme, applyFunc, primaryVar);
if (!hasSimpleGetterSetter(qme.getDeclaration())) {
out(",", getMember);
}
out(")");
}
}
@Override public void visit(PostfixIncrementOp that) {
postfixIncrementOrDecrement(that.getTerm(), "successor");
}
@Override public void visit(PostfixDecrementOp that) {
postfixIncrementOrDecrement(that.getTerm(), "predecessor");
}
private void postfixIncrementOrDecrement(Term term, String functionName) {
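        // For example (sketch; temp names are placeholders), "x++" is emitted roughly as
        // "(old$=x,x=old$.successor,old$)" so the expression yields the old value; inside dynamic
        // blocks the native "++"/"--" operators are used directly.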
if (term instanceof BaseMemberExpression) {
BaseMemberExpression bme = (BaseMemberExpression) term;
if (bme.getDeclaration() == null && dynblock > 0) {
out(bme.getIdentifier().getText(), "successor".equals(functionName) ? "++" : "--");
return;
}
String oldValueVar = createRetainedTempVar("old" + bme.getDeclaration().getName());
String applyFunc = String.format("%s.%s", oldValueVar, functionName);
out("(", oldValueVar, "=", memberAccess(bme, null), ",");
generateMemberAccess(bme, applyFunc, null);
out(",", oldValueVar, ")");
} else if (term instanceof QualifiedMemberExpression) {
QualifiedMemberExpression qme = (QualifiedMemberExpression) term;
if (qme.getDeclaration() == null && dynblock > 0) {
out(qme.getIdentifier().getText(), "successor".equals(functionName) ? "++" : "--");
return;
}
String primaryVar = createRetainedTempVar();
String oldValueVar = createRetainedTempVar("old" + qme.getDeclaration().getName());
String applyFunc = String.format("%s.%s", oldValueVar, functionName);
out("(", primaryVar, "=");
qme.getPrimary().visit(this);
out(",", oldValueVar, "=", memberAccess(qme, primaryVar), ",");
generateMemberAccess(qme, applyFunc, primaryVar);
out(",", oldValueVar, ")");
}
}
@Override
public void visit(final UnionOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
termgen.left();
out(".union(");
termgen.right();
out(",");
TypeUtils.printTypeArguments(that, that.getRightTerm().getTypeModel().getTypeArguments(),
GenerateJsVisitor.this);
out(")");
}
});
}
@Override
public void visit(final IntersectionOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
termgen.left();
out(".intersection(");
termgen.right();
out(",");
TypeUtils.printTypeArguments(that, that.getRightTerm().getTypeModel().getTypeArguments(),
GenerateJsVisitor.this);
out(")");
}
});
}
@Override
public void visit(final XorOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
termgen.left();
out(".exclusiveUnion(");
termgen.right();
out(",");
TypeUtils.printTypeArguments(that, that.getRightTerm().getTypeModel().getTypeArguments(),
GenerateJsVisitor.this);
out(")");
}
});
}
@Override
public void visit(final ComplementOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
termgen.left();
out(".complement(");
termgen.right();
out(",");
TypeUtils.printTypeArguments(that, that.getRightTerm().getTypeModel().getTypeArguments(),
GenerateJsVisitor.this);
out(")");
}
});
}
@Override public void visit(Exists that) {
unaryOp(that, new UnaryOpGenerator() {
@Override
public void generate(UnaryOpTermGenerator termgen) {
out(clAlias, "exists(");
termgen.term();
out(")");
}
});
}
@Override public void visit(Nonempty that) {
unaryOp(that, new UnaryOpGenerator() {
@Override
public void generate(UnaryOpTermGenerator termgen) {
out(clAlias, "nonempty(");
termgen.term();
out(")");
}
});
}
//Don't know if we'll ever see this...
@Override public void visit(ConditionList that) {
System.out.println("ZOMG condition list in the wild! " + that.getLocation() + " of " + that.getUnit().getFilename());
super.visit(that);
}
@Override public void visit(BooleanCondition that) {
int boxType = boxStart(that.getExpression().getTerm());
super.visit(that);
boxUnboxEnd(boxType);
}
@Override public void visit(IfStatement that) {
conds.generateIf(that);
}
@Override public void visit(WhileStatement that) {
conds.generateWhile(that);
}
/** Generates js code to check if a term is of a certain type. We solve this in JS by
* checking against all types that Type satisfies (in the case of union types, matching any
* type will do, and in case of intersection types, all types must be matched).
* @param term The term that is to be checked against a type
* @param termString (optional) a string to be used as the term to be checked
* @param type The type to check against
* @param tmpvar (optional) a variable to which the term is assigned
* @param negate If true, negates the generated condition
*/
void generateIsOfType(Node term, String termString, Type type, String tmpvar, final boolean negate) {
if (negate) {
out("!");
}
out(clAlias, "isOfType(");
if (term instanceof Term) {
conds.specialConditionRHS((Term)term, tmpvar);
} else {
conds.specialConditionRHS(termString, tmpvar);
}
out(",");
TypeUtils.typeNameOrList(term, type.getTypeModel(), this, true);
out(")");
}
@Override
public void visit(IsOp that) {
generateIsOfType(that.getTerm(), null, that.getType(), null, false);
}
@Override public void visit(Break that) {
if (continues.isEmpty()) {
out("break;");
} else {
Continuation top=continues.peek();
if (that.getScope()==top.getScope()) {
top.useBreak();
out(top.getBreakName(), "=true; return;");
} else {
out("break;");
}
}
}
@Override public void visit(Continue that) {
if (continues.isEmpty()) {
out("continue;");
} else {
Continuation top=continues.peek();
if (that.getScope()==top.getScope()) {
top.useContinue();
out(top.getContinueName(), "=true; return;");
} else {
out("continue;");
}
}
}
@Override public void visit(final RangeOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
out(clAlias, "Range(");
termgen.left();
out(",");
termgen.right();
out(",");
TypeUtils.printTypeArguments(that,
that.getTypeModel().getTypeArguments(),
GenerateJsVisitor.this);
out(")");
}
});
}
@Override public void visit(ForStatement that) {
if (comment) {
out("//'for' statement at ", that.getUnit().getFilename(), " (", that.getLocation(), ")");
if (that.getExits()) out("//EXITS!");
endLine();
}
ForIterator foriter = that.getForClause().getForIterator();
final String itemVar = generateForLoop(foriter);
boolean hasElse = that.getElseClause() != null && !that.getElseClause().getBlock().getStatements().isEmpty();
visitStatements(that.getForClause().getBlock().getStatements());
//If there's an else block, check for normal termination
endBlock();
if (hasElse) {
endLine();
out("if (", clAlias, "getFinished() === ", itemVar, ")");
encloseBlockInFunction(that.getElseClause().getBlock());
}
}
/** Generates code for the beginning of a "for" loop, returning the name of the variable used for the item. */
private String generateForLoop(ForIterator that) {
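        // Emits the loop header, roughly (sketch; temp names are placeholders):
        //   var it$=<iterable>.iterator;
        //   var item$;while((item$=it$.next())!==getFinished()){
        // For key/value iterators two additional variables are declared from item$.key and item$.item.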
SpecifierExpression iterable = that.getSpecifierExpression();
final String iterVar = names.createTempVariable("it");
final String itemVar;
if (that instanceof ValueIterator) {
itemVar = names.name(((ValueIterator)that).getVariable().getDeclarationModel());
} else {
itemVar = names.createTempVariable("item");
}
out("var ", iterVar, " = ");
iterable.visit(this);
out(".iterator;");
endLine();
out("var ", itemVar, ";while ((", itemVar, "=", iterVar, ".next())!==", clAlias, "getFinished())");
beginBlock();
if (that instanceof ValueIterator) {
directAccess.add(((ValueIterator)that).getVariable().getDeclarationModel());
} else if (that instanceof KeyValueIterator) {
String keyvar = names.name(((KeyValueIterator)that).getKeyVariable().getDeclarationModel());
String valvar = names.name(((KeyValueIterator)that).getValueVariable().getDeclarationModel());
out("var ", keyvar, "=", itemVar, ".key;");
endLine();
out("var ", valvar, "=", itemVar, ".item;");
directAccess.add(((KeyValueIterator)that).getKeyVariable().getDeclarationModel());
directAccess.add(((KeyValueIterator)that).getValueVariable().getDeclarationModel());
endLine();
}
return itemVar;
}
public void visit(InOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
termgen.right();
out(".contains(");
termgen.left();
out(")");
}
});
}
@Override public void visit(TryCatchStatement that) {
out("try");
encloseBlockInFunction(that.getTryClause().getBlock());
if (!that.getCatchClauses().isEmpty()) {
String catchVarName = names.createTempVariable("ex");
out("catch(", catchVarName, ")");
beginBlock();
boolean firstCatch = true;
for (CatchClause catchClause : that.getCatchClauses()) {
Variable variable = catchClause.getCatchVariable().getVariable();
if (!firstCatch) {
out("else ");
}
firstCatch = false;
out("if(");
generateIsOfType(variable, catchVarName, variable.getType(), null, false);
out(")");
if (catchClause.getBlock().getStatements().isEmpty()) {
out("{}");
} else {
beginBlock();
directAccess.add(variable.getDeclarationModel());
names.forceName(variable.getDeclarationModel(), catchVarName);
visitStatements(catchClause.getBlock().getStatements());
endBlockNewLine();
}
}
out("else{throw ", catchVarName, "}");
endBlockNewLine();
}
if (that.getFinallyClause() != null) {
out("finally");
encloseBlockInFunction(that.getFinallyClause().getBlock());
}
}
@Override public void visit(Throw that) {
out("throw ");
if (that.getExpression() != null) {
that.getExpression().visit(this);
} else {
out(clAlias, "Exception()");
}
out(";");
}
private void visitIndex(IndexExpression that) {
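        // Maps index expressions onto runtime calls (sketch):
        //   x[i]    -> x.get(i)   (plain "x[i]" inside a dynamic block when the primary's type is unknown)
        //   x[i..j] -> x.span(i,j)
        //   x[i...] -> x.spanFrom(i)
        //   x[...j] -> x.spanTo(j)
        //   x[i:n]  -> x.segment(i,n)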
that.getPrimary().visit(this);
ElementOrRange eor = that.getElementOrRange();
if (eor instanceof Element) {
if (TypeUtils.isUnknown(that.getPrimary().getTypeModel()) && dynblock > 0) {
out("[");
((Element)eor).getExpression().visit(this);
out("]");
} else {
out(".get(");
((Element)eor).getExpression().visit(this);
out(")");
}
} else {//range, or spread?
ElementRange er = (ElementRange)eor;
Expression sexpr = er.getLength();
if (sexpr == null) {
if (er.getLowerBound() == null) {
out(".spanTo(");
} else if (er.getUpperBound() == null) {
out(".spanFrom(");
} else {
out(".span(");
}
} else {
out(".segment(");
}
if (er.getLowerBound() != null) {
er.getLowerBound().visit(this);
if (er.getUpperBound() != null || sexpr != null) {
out(",");
}
}
if (er.getUpperBound() != null) {
er.getUpperBound().visit(this);
} else if (sexpr != null) {
sexpr.visit(this);
}
out(")");
}
}
public void visit(IndexExpression that) {
visitIndex(that);
}
/** Generates code for a case clause, as part of a switch statement. Each case
* is rendered as an if. */
private void caseClause(CaseClause cc, String expvar, Term switchTerm) {
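        // Each case becomes an "if (<test>)" block (chained with "else " by the switch generator):
        //   case (is T)     -> an isOfType() check against the switched value
        //   case (a|b|c)    -> <expvar>===a || <expvar>===b || <expvar>===c
        //   case (satisfies ...) is reported as not yet supported.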
out("if (");
final CaseItem item = cc.getCaseItem();
if (item instanceof IsCase) {
IsCase isCaseItem = (IsCase) item;
generateIsOfType(switchTerm, expvar, isCaseItem.getType(), null, false);
Variable caseVar = isCaseItem.getVariable();
if (caseVar != null) {
directAccess.add(caseVar.getDeclarationModel());
names.forceName(caseVar.getDeclarationModel(), expvar);
}
} else if (item instanceof SatisfiesCase) {
item.addError("case(satisfies) not yet supported");
out("true");
} else if (item instanceof MatchCase){
boolean first = true;
for (Expression exp : ((MatchCase)item).getExpressionList().getExpressions()) {
if (!first) out(" || ");
out(expvar, "==="); //TODO equality?
/*out(".equals(");*/
exp.visit(this);
//out(")==="); clAlias(); out("getTrue()");
first = false;
}
} else {
cc.addUnexpectedError("support for case of type " + cc.getClass().getSimpleName() + " not yet implemented");
}
out(") ");
encloseBlockInFunction(cc.getBlock());
}
@Override
public void visit(SwitchStatement that) {
if (comment) out("//Switch statement at ", that.getUnit().getFilename(), " (", that.getLocation(), ")");
endLine();
//Put the expression in a tmp var
final String expvar = names.createTempVariable("case");
out("var ", expvar, "=");
Expression expr = that.getSwitchClause().getExpression();
expr.visit(this);
endLine(true);
//For each case, do an if
boolean first = true;
for (CaseClause cc : that.getSwitchCaseList().getCaseClauses()) {
if (!first) out("else ");
caseClause(cc, expvar, expr.getTerm());
first = false;
}
if (that.getSwitchCaseList().getElseClause() != null) {
out("else ");
that.getSwitchCaseList().getElseClause().visit(this);
}
if (comment) {
out("//End switch statement at ", that.getUnit().getFilename(), " (", that.getLocation(), ")");
endLine();
}
}
/** Generates the code for an anonymous function defined inside an argument list. */
@Override
public void visit(final FunctionArgument that) {
singleExprFunction(that.getParameterLists(), that.getExpression(), that.getScope());
}
private void singleExprFunction(final List<ParameterList> paramLists,
final Expression expr, final Scope scope) {
generateParameterLists(paramLists, scope, new ParameterListCallback() {
@Override
public void completeFunction() {
beginBlock();
if (paramLists.size() == 1) { initSelf(scope); }
initParameters(paramLists.get(paramLists.size()-1), null);
out("return ");
expr.visit(GenerateJsVisitor.this);
out(";");
endBlock();
}
});
}
/** Generates the code for a function in a named argument list. */
@Override
public void visit(final MethodArgument that) {
generateParameterLists(that.getParameterLists(), that.getScope(),
new ParameterListCallback() {
@Override
public void completeFunction() {
Block block = that.getBlock();
SpecifierExpression specExpr = that.getSpecifierExpression();
if (specExpr != null) {
out("{return ");
specExpr.getExpression().visit(GenerateJsVisitor.this);
out(";}");
}
else if (block != null) {
block.visit(GenerateJsVisitor.this);
}
}
});
}
@Override
public void visit(SegmentOp that) {
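        // "lhs:length" is emitted as an IIFE (sketch): when length>0 it walks length-1 successors from
        // lhs to compute the end of the segment and returns Range(lhs,end); otherwise it returns getEmpty().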
String rhs = names.createTempVariable();
out("(function(){var ", rhs, "=");
that.getRightTerm().visit(this);
endLine(true);
out("if (", rhs, ">0){");
endLine();
String lhs = names.createTempVariable();
String end = names.createTempVariable();
out("var ", lhs, "=");
that.getLeftTerm().visit(this);
endLine(true);
out("var ", end, "=", lhs);
endLine(true);
out("for (var i=1; i<", rhs, "; i++){", end, "=", end, ".successor;}");
endLine();
out("return ", clAlias, "Range(");
out(lhs, ",", end, ")");
endLine();
out("}else return ", clAlias, "getEmpty();}())");
}
/** Generates the code for single or multiple parameter lists, with a callback function to generate the function blocks. */
private void generateParameterLists(List<ParameterList> plist, Scope scope,
ParameterListCallback callback) {
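        // A single parameter list yields "function(params){...}"; multiple lists are curried, roughly
        // "function(a){... return function(b){...};}" with one nested function per additional list.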
if (plist.size() == 1) {
out(function);
ParameterList paramList = plist.get(0);
paramList.visit(this);
callback.completeFunction();
} else {
int count=0;
for (ParameterList paramList : plist) {
if (count==0) {
out(function);
} else {
out("return function");
}
paramList.visit(this);
if (count == 0) {
beginBlock();
initSelf(scope);
initParameters(paramList, null);
}
else {
out("{");
}
count++;
}
callback.completeFunction();
for (int i=0; i < count; i++) {
endBlock(false, i==count-1);
}
}
}
    /** Encloses the block in an immediately-invoked function, if needed, so that 'return',
     * 'break' and 'continue' inside the block can be propagated to the enclosing loop or function. */
void encloseBlockInFunction(Block block) {
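        // When wrapping is needed the block is emitted roughly as (sketch; names are placeholders):
        //   var cnt$=false;var brk$=false;
        //   var ret$=(function(){ ...block... }());
        //   if(ret$!==undefined){return ret$;}else if(cnt$===true){continue;}else if(brk$===true){break;}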
boolean wrap=encloser.encloseBlock(block);
if (wrap) {
beginBlock();
Continuation c = new Continuation(block.getScope(), names);
continues.push(c);
out("var ", c.getContinueName(), "=false;"); endLine();
out("var ", c.getBreakName(), "=false;"); endLine();
out("var ", c.getReturnName(), "=(function()");
}
block.visit(this);
if (wrap) {
Continuation c = continues.pop();
out("());if(", c.getReturnName(), "!==undefined){return ", c.getReturnName(), ";}");
if (c.isContinued()) {
out("else if(", c.getContinueName(),"===true){continue;}");
}
if (c.isBreaked()) {
out("else if (", c.getBreakName(),"===true){break;}");
}
endBlockNewLine();
}
}
private static class Continuation {
private final String cvar;
private final String rvar;
private final String bvar;
private final Scope scope;
private boolean cused, bused;
public Continuation(Scope scope, JsIdentifierNames names) {
this.scope=scope;
cvar = names.createTempVariable("cntvar");
rvar = names.createTempVariable("retvar");
bvar = names.createTempVariable("brkvar");
}
public Scope getScope() { return scope; }
public String getContinueName() { return cvar; }
public String getBreakName() { return bvar; }
public String getReturnName() { return rvar; }
public void useContinue() { cused = true; }
public void useBreak() { bused=true; }
public boolean isContinued() { return cused; }
public boolean isBreaked() { return bused; } //"isBroken" sounds really really bad in this case
}
private static interface ParameterListCallback {
void completeFunction();
}
/** This interface is used inside type initialization method. */
private interface PrototypeInitCallback {
void addToPrototypeCallback();
}
@Override
public void visit(Tuple that) {
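        // A tuple [a,b,c] is emitted as nested Tuple(...) calls terminated by getEmpty(), roughly
        // "Tuple(a,Tuple(b,Tuple(c,getEmpty(),<targs>),<targs>),<targs>)" (sketch); a spread tail is
        // emitted as the spread expression itself (or its ".sequence") instead of getEmpty().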
int count = 0;
SequencedArgument sarg = that.getSequencedArgument();
if (sarg == null) {
out(clAlias, "getEmpty()");
} else {
List<Map<TypeParameter,ProducedType>> targs = new ArrayList<Map<TypeParameter,ProducedType>>();
List<PositionalArgument> positionalArguments = sarg.getPositionalArguments();
            boolean spread = !positionalArguments.isEmpty()
                    && !(positionalArguments.get(positionalArguments.size()-1) instanceof Tree.ListedArgument);
int lim = positionalArguments.size()-1;
for (PositionalArgument expr : positionalArguments) {
if (count > 0) {
out(",");
}
ProducedType exprType = expr.getTypeModel();
if (count==lim && spread) {
if (exprType.getDeclaration().inherits(types.tuple)) {
expr.visit(this);
} else {
expr.visit(this);
out(".sequence");
}
} else {
out(clAlias, "Tuple(");
if (count > 0) {
for (Map.Entry<TypeParameter,ProducedType> e : targs.get(0).entrySet()) {
if (e.getKey().getName().equals("Rest")) {
targs.add(0, e.getValue().getTypeArguments());
}
}
} else {
targs.add(that.getTypeModel().getTypeArguments());
}
if (dynblock > 0 && TypeUtils.isUnknown(exprType) && expr instanceof ListedArgument) {
exprType = types.anything.getType();
TypeUtils.generateDynamicCheck(((ListedArgument)expr).getExpression(), exprType, this);
} else {
expr.visit(this);
}
}
count++;
}
if (!spread) {
if (count > 0) {
out(",");
}
out(clAlias, "getEmpty()");
} else {
count--;
}
for (Map<TypeParameter,ProducedType> t : targs) {
out(",");
TypeUtils.printTypeArguments(that, t, this);
out(")");
}
}
}
@Override
public void visit(Assertion that) {
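        // Emits roughly (sketch): if (!<conditions>) { throw AssertionException('<message>'); }
        // where the message is built from the "doc" annotation (if any) plus the original source
        // text of the condition list and its location.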
out("//assert");
location(that);
String custom = "Assertion failed";
//Scan for a "doc" annotation with custom message
for (Annotation ann : that.getAnnotationList().getAnnotations()) {
BaseMemberExpression bme = (BaseMemberExpression)ann.getPrimary();
if ("doc".equals(bme.getDeclaration().getName())) {
custom = ((Tree.ListedArgument)ann.getPositionalArgumentList().getPositionalArguments().get(0)).getExpression().getTerm().getText();
}
}
endLine();
StringBuilder sb = new StringBuilder(custom).append(": '");
for (int i = that.getConditionList().getToken().getTokenIndex()+1;
i < that.getConditionList().getEndToken().getTokenIndex(); i++) {
sb.append(tokens.get(i).getText());
}
sb.append("' at ").append(that.getUnit().getFilename()).append(" (").append(
that.getConditionList().getLocation()).append(")");
conds.specialConditionsAndBlock(that.getConditionList(), null, "if (!");
//escape
custom = escapeStringLiteral(sb.toString());
out(") { throw ", clAlias, "AssertionException('", custom, "'); }");
endLine();
}
@Override
public void visit(Tree.DynamicClause that) {
dynblock++;
out("/*Begin dynamic block*/");
super.visit(that);
out("/*End dynamic block*/");
dynblock--;
}
/** Closes a native array and invokes reifyCeylonType with the specified type parameters. */
void closeSequenceWithReifiedType(Node that, Map<TypeParameter,ProducedType> types) {
out("].reifyCeylonType(");
TypeUtils.printTypeArguments(that, types, this);
out(")");
}
boolean isInDynamicBlock() {
return dynblock > 0;
}
}
| src/main/java/com/redhat/ceylon/compiler/js/GenerateJsVisitor.java | package com.redhat.ceylon.compiler.js;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Stack;
import org.antlr.runtime.CommonToken;
import com.redhat.ceylon.compiler.typechecker.analyzer.AnalysisWarning;
import com.redhat.ceylon.compiler.typechecker.model.Class;
import com.redhat.ceylon.compiler.typechecker.model.ClassOrInterface;
import com.redhat.ceylon.compiler.typechecker.model.Declaration;
import com.redhat.ceylon.compiler.typechecker.model.Functional;
import com.redhat.ceylon.compiler.typechecker.model.Getter;
import com.redhat.ceylon.compiler.typechecker.model.ImportableScope;
import com.redhat.ceylon.compiler.typechecker.model.Interface;
import com.redhat.ceylon.compiler.typechecker.model.InterfaceAlias;
import com.redhat.ceylon.compiler.typechecker.model.Method;
import com.redhat.ceylon.compiler.typechecker.model.MethodOrValue;
import com.redhat.ceylon.compiler.typechecker.model.Module;
import com.redhat.ceylon.compiler.typechecker.model.Package;
import com.redhat.ceylon.compiler.typechecker.model.ProducedType;
import com.redhat.ceylon.compiler.typechecker.model.Scope;
import com.redhat.ceylon.compiler.typechecker.model.Setter;
import com.redhat.ceylon.compiler.typechecker.model.Specification;
import com.redhat.ceylon.compiler.typechecker.model.TypeDeclaration;
import com.redhat.ceylon.compiler.typechecker.model.TypeParameter;
import com.redhat.ceylon.compiler.typechecker.model.Util;
import com.redhat.ceylon.compiler.typechecker.model.Value;
import com.redhat.ceylon.compiler.typechecker.tree.*;
import com.redhat.ceylon.compiler.typechecker.tree.Tree.*;
public class GenerateJsVisitor extends Visitor
implements NaturalVisitor {
private boolean indent=true;
private boolean comment=true;
private boolean verbose=false;
private final Stack<Continuation> continues = new Stack<Continuation>();
private final EnclosingFunctionVisitor encloser = new EnclosingFunctionVisitor();
private final JsIdentifierNames names;
private final Set<Declaration> directAccess = new HashSet<Declaration>();
private final RetainedVars retainedVars = new RetainedVars();
private final Map<String, String> importedModules;
final ConditionGenerator conds;
private final InvocationGenerator invoker;
private final List<CommonToken> tokens;
private int dynblock;
private final class SuperVisitor extends Visitor {
private final List<Declaration> decs;
private SuperVisitor(List<Declaration> decs) {
this.decs = decs;
}
@Override
public void visit(QualifiedMemberOrTypeExpression qe) {
if (qe.getPrimary() instanceof Super) {
decs.add(qe.getDeclaration());
}
super.visit(qe);
}
@Override
public void visit(BaseMemberOrTypeExpression that) {
if (that.getSupertypeQualifier() != null) {
decs.add(that.getDeclaration());
}
super.visit(that);
}
@Override
public void visit(QualifiedType that) {
if (that.getOuterType() instanceof SuperType) {
decs.add(that.getDeclarationModel());
}
super.visit(that);
}
public void visit(Tree.ClassOrInterface qe) {
//don't recurse
if (qe instanceof ClassDefinition) {
ExtendedType extType = ((ClassDefinition) qe).getExtendedType();
if (extType != null) { super.visit(extType); }
}
}
}
private final class OuterVisitor extends Visitor {
boolean found = false;
private Declaration dec;
private OuterVisitor(Declaration dec) {
this.dec = dec;
}
@Override
public void visit(QualifiedMemberOrTypeExpression qe) {
if (qe.getPrimary() instanceof Outer ||
qe.getPrimary() instanceof This) {
if ( qe.getDeclaration().equals(dec) ) {
found = true;
}
}
super.visit(qe);
}
}
private List<? extends Statement> currentStatements = null;
private final TypeUtils types;
private final Writer out;
final boolean prototypeStyle;
private CompilationUnit root;
private static String clAlias="";
private static final String function="function ";
private boolean needIndent = true;
private int indentLevel = 0;
private static void setCLAlias(String alias) {
clAlias = alias + ".";
}
    /** Returns the prefix (module alias followed by a dot) used to access the language module. */
static String getClAlias() { return clAlias; }
@Override
public void handleException(Exception e, Node that) {
that.addUnexpectedError(that.getMessage(e, this));
}
public GenerateJsVisitor(Writer out, boolean prototypeStyle, JsIdentifierNames names,
List<CommonToken> tokens, Map<String,String> imports, TypeUtils typeUtils) {
this.out = out;
this.prototypeStyle=prototypeStyle;
this.names = names;
conds = new ConditionGenerator(this, names, directAccess);
this.tokens = tokens;
importedModules = imports;
types = typeUtils;
invoker = new InvocationGenerator(this, names, retainedVars);
}
TypeUtils getTypeUtils() { return types; }
/** Tells the receiver whether to add comments to certain declarations. Default is true. */
public void setAddComments(boolean flag) { comment = flag; }
public boolean isAddComments() { return comment; }
/** Tells the receiver whether to indent the generated code. Default is true. */
public void setIndent(boolean flag) { indent = flag; }
/** Tells the receiver to be verbose (prints generated code to STDOUT in addition to writer) */
public void setVerbose(boolean flag) { verbose = flag; }
/** Returns the helper component to handle naming. */
JsIdentifierNames getNames() { return names; }
/** Print generated code to the Writer specified at creation time.
* Automatically prints indentation first if necessary.
* @param code The main code
* @param codez Optional additional strings to print after the main code. */
void out(String code, String... codez) {
try {
if (indent && needIndent) {
for (int i=0;i<indentLevel;i++) {
out.write(" ");
}
}
needIndent = false;
out.write(code);
for (String s : codez) {
out.write(s);
}
if (verbose) {
System.out.print(code);
for (String s : codez) {
System.out.print(s);
}
}
}
catch (IOException ioe) {
throw new RuntimeException("Generating JS code", ioe);
}
}
/** Prints a newline. Indentation will automatically be printed by {@link #out(String, String...)}
* when the next line is started. */
void endLine() {
endLine(false);
}
/** Prints a newline. Indentation will automatically be printed by {@link #out(String, String...)}
* when the next line is started.
* @param semicolon if <code>true</code> then a semicolon is printed at the end
* of the previous line*/
void endLine(boolean semicolon) {
if (semicolon) { out(";"); }
out("\n");
needIndent = true;
}
/** Calls {@link #endLine()} if the current position is not already the beginning
* of a line. */
void beginNewLine() {
if (!needIndent) { endLine(); }
}
/** Increases indentation level, prints opening brace and newline. Indentation will
* automatically be printed by {@link #out(String, String...)} when the next line is started. */
void beginBlock() {
indentLevel++;
out("{");
endLine();
}
/** Decreases indentation level, prints a closing brace in new line (using
* {@link #beginNewLine()}) and calls {@link #endLine()}. */
void endBlockNewLine() {
endBlock(false, true);
}
/** Decreases indentation level, prints a closing brace in new line (using
* {@link #beginNewLine()}) and calls {@link #endLine()}.
* @param semicolon if <code>true</code> then prints a semicolon after the brace*/
void endBlockNewLine(boolean semicolon) {
endBlock(semicolon, true);
}
/** Decreases indentation level and prints a closing brace in new line (using
* {@link #beginNewLine()}). */
void endBlock() {
endBlock(false, false);
}
/** Decreases indentation level and prints a closing brace in new line (using
* {@link #beginNewLine()}).
* @param semicolon if <code>true</code> then prints a semicolon after the brace
* @param newline if <code>true</code> then additionally calls {@link #endLine()} */
void endBlock(boolean semicolon, boolean newline) {
indentLevel--;
beginNewLine();
out(semicolon ? "};" : "}");
if (newline) { endLine(); }
}
/** Prints source code location in the form "at [filename] ([location])" */
void location(Node node) {
out(" at ", node.getUnit().getFilename(), " (", node.getLocation(), ")");
}
@Override
public void visit(CompilationUnit that) {
root = that;
Module clm = that.getUnit().getPackage().getModule()
.getLanguageModule();
if (!JsCompiler.compilingLanguageModule) {
require(clm);
setCLAlias(names.moduleAlias(clm));
}
for (CompilerAnnotation ca: that.getCompilerAnnotations()) {
ca.visit(this);
}
if (that.getImportList() != null) {
that.getImportList().visit(this);
}
visitStatements(that.getDeclarations());
}
public void visit(Import that) {
ImportableScope scope =
that.getImportMemberOrTypeList().getImportList().getImportedScope();
if (scope instanceof Package) {
require(((Package) scope).getModule());
}
}
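/** Emits "var <alias>=require('<path>');" for the given module and registers its alias,
* unless that path was already present in the imported modules map. */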
private void require(Module mod) {
final String path = scriptPath(mod);
final String modAlias = names.moduleAlias(mod);
if (importedModules.put(path, modAlias) == null) {
out("var ", modAlias, "=require('", path, "');");
endLine();
}
}
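/** Builds the require() path for a module script. For example, a module named "foo.bar"
* at version "1.0" yields "foo/bar/1.0/foo.bar-1.0"; the default module omits both
* version segments. */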
private String scriptPath(Module mod) {
StringBuilder path = new StringBuilder(mod.getNameAsString().replace('.', '/')).append('/');
if (!mod.isDefault()) {
path.append(mod.getVersion()).append('/');
}
path.append(mod.getNameAsString());
if (!mod.isDefault()) {
path.append('-').append(mod.getVersion());
}
return path.toString();
}
@Override
public void visit(Parameter that) {
out(names.name(that.getDeclarationModel()));
}
@Override
public void visit(ParameterList that) {
out("(");
boolean first=true;
boolean ptypes = false;
//Check if this is the first parameter list
if (that.getScope() instanceof Method && that.getModel().isFirst()) {
ptypes = ((Method)that.getScope()).getTypeParameters() != null &&
!((Method)that.getScope()).getTypeParameters().isEmpty();
}
for (Parameter param: that.getParameters()) {
if (!first) out(",");
out(names.name(param.getDeclarationModel()));
first = false;
}
if (ptypes) {
if (!first) out(",");
out("$$$mptypes");
}
out(")");
}
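/** Visits a list of statements, emitting a line break and any retained temporary
* variables after each statement. */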
private void visitStatements(List<? extends Statement> statements) {
List<String> oldRetainedVars = retainedVars.reset(null);
final List<? extends Statement> prevStatements = currentStatements;
currentStatements = statements;
for (int i=0; i<statements.size(); i++) {
Statement s = statements.get(i);
s.visit(this);
beginNewLine();
retainedVars.emitRetainedVars(this);
}
retainedVars.reset(oldRetainedVars);
currentStatements = prevStatements;
}
@Override
public void visit(Body that) {
visitStatements(that.getStatements());
}
@Override
public void visit(Block that) {
List<Statement> stmnts = that.getStatements();
if (stmnts.isEmpty()) {
out("{}");
}
else {
beginBlock();
initSelf(that);
visitStatements(stmnts);
endBlock();
}
}
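/** Emits "var <self>=this;" at the start of a block when code is being generated into a
* prototype and the enclosing scope needs a reference to the prototype owner. */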
private void initSelf(Block block) {
initSelf(block.getScope());
}
private void initSelf(Scope scope) {
if ((prototypeOwner != null) &&
((scope instanceof MethodOrValue)
|| (scope instanceof TypeDeclaration)
|| (scope instanceof Specification))) {
out("var ");
self(prototypeOwner);
out("=this;");
endLine();
}
}
private void comment(Tree.Declaration that) {
if (!comment) return;
endLine();
out("//", that.getNodeType(), " ", that.getDeclarationModel().getName());
location(that);
endLine();
}
private void var(Declaration d) {
out("var ", names.name(d), "=");
}
private boolean share(Declaration d) {
return share(d, true);
}
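/** Shares a declaration by assigning it to a member of its outer self object, but only if
* it is captured (and, optionally, not a prototype-style class member).
* Returns true if the declaration was actually shared. */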
private boolean share(Declaration d, boolean excludeProtoMembers) {
boolean shared = false;
if (!(excludeProtoMembers && prototypeStyle && d.isClassOrInterfaceMember())
&& isCaptured(d)) {
beginNewLine();
outerSelf(d);
out(".", names.name(d), "=", names.name(d), ";");
endLine();
shared = true;
}
return shared;
}
@Override
public void visit(ClassDeclaration that) {
//Don't even bother with nodes that have errors
if (that.getErrors() != null && !that.getErrors().isEmpty()) {
//But warnings are ok
for (Message err : that.getErrors()) {
if (!(err instanceof AnalysisWarning)) {
return;
}
}
}
Class d = that.getDeclarationModel();
if (prototypeStyle && d.isClassOrInterfaceMember()) return;
comment(that);
Tree.ClassSpecifier ext = that.getClassSpecifier();
out(function, names.name(d), "(");
//Generate each parameter because we need to append one at the end
for (Parameter p: that.getParameterList().getParameters()) {
p.visit(this);
out(", ");
}
TypeArgumentList targs = ext.getType().getTypeArgumentList();
if (targs != null && !targs.getTypes().isEmpty()) {
out("$$targs$$,");
}
self(d);
out(")");
TypeDeclaration aliased = ext.getType().getDeclarationModel();
out("{return ");
qualify(ext.getType(), aliased);
out(names.name(aliased), "(");
if (ext.getInvocationExpression().getPositionalArgumentList() != null) {
ext.getInvocationExpression().getPositionalArgumentList().visit(this);
if (!ext.getInvocationExpression().getPositionalArgumentList().getPositionalArguments().isEmpty()) {
out(",");
}
} else {
out("/*PENDIENTE NAMED ARG CLASS DECL */");
}
if (targs != null && !targs.getTypes().isEmpty()) {
Map<TypeParameter, ProducedType> invargs = TypeUtils.matchTypeParametersWithArguments(
aliased.getTypeParameters(), targs.getTypeModels());
if (invargs != null) {
TypeUtils.printTypeArguments(that, invargs, this);
} else {
out("/*TARGS != TPARAMS!!!! WTF?????*/");
}
out(",");
}
self(d);
out(");}");
endLine();
out(names.name(d), ".$$=");
qualify(ext, aliased);
out(names.name(aliased), ".$$;");
endLine();
share(d);
}
private void addClassDeclarationToPrototype(TypeDeclaration outer, ClassDeclaration that) {
comment(that);
TypeDeclaration dec = that.getClassSpecifier().getType().getTypeModel().getDeclaration();
String path = qualifiedPath(that, dec, true);
if (path.length() > 0) {
path += '.';
}
out(names.self(outer), ".", names.name(that.getDeclarationModel()), "=",
path, names.name(dec), ";");
endLine();
}
@Override
public void visit(InterfaceDeclaration that) {
//Don't even bother with nodes that have errors
if (that.getErrors() != null && !that.getErrors().isEmpty()) return;
Interface d = that.getDeclarationModel();
if (prototypeStyle && d.isClassOrInterfaceMember()) return;
//It's pointless declaring interface aliases outside of classes/interfaces
Scope scope = that.getScope();
if (scope instanceof InterfaceAlias) {
scope = scope.getContainer();
if (!(scope instanceof ClassOrInterface)) return;
}
comment(that);
var(d);
TypeDeclaration dec = that.getTypeSpecifier().getType().getTypeModel()
.getDeclaration();
qualify(that,dec);
out(names.name(dec), ";");
endLine();
share(d);
}
private void addInterfaceDeclarationToPrototype(TypeDeclaration outer, InterfaceDeclaration that) {
comment(that);
TypeDeclaration dec = that.getTypeSpecifier().getType().getTypeModel().getDeclaration();
String path = qualifiedPath(that, dec, true);
if (path.length() > 0) {
path += '.';
}
out(names.self(outer), ".", names.name(that.getDeclarationModel()), "=",
path, names.name(dec), ";");
endLine();
}
private void addInterfaceToPrototype(ClassOrInterface type, InterfaceDefinition interfaceDef) {
interfaceDefinition(interfaceDef);
Interface d = interfaceDef.getDeclarationModel();
out(names.self(type), ".", names.name(d), "=", names.name(d), ";");
endLine();
}
@Override
public void visit(InterfaceDefinition that) {
//Don't even bother with nodes that have errors
if (that.getErrors() != null && !that.getErrors().isEmpty()) return;
if (!(prototypeStyle && that.getDeclarationModel().isClassOrInterfaceMember())) {
interfaceDefinition(that);
}
}
private void interfaceDefinition(InterfaceDefinition that) {
Interface d = that.getDeclarationModel();
comment(that);
out(function, names.name(d), "(");
self(d);
out(")");
beginBlock();
//declareSelf(d);
referenceOuter(d);
final List<Declaration> superDecs = new ArrayList<Declaration>();
if (!prototypeStyle) {
new SuperVisitor(superDecs).visit(that.getInterfaceBody());
}
callInterfaces(that.getSatisfiedTypes(), d, that, superDecs);
that.getInterfaceBody().visit(this);
//returnSelf(d);
endBlockNewLine();
share(d);
typeInitialization(that);
}
private void addClassToPrototype(ClassOrInterface type, ClassDefinition classDef) {
classDefinition(classDef);
Class d = classDef.getDeclarationModel();
out(names.self(type), ".", names.name(d), "=", names.name(d), ";");
endLine();
}
@Override
public void visit(ClassDefinition that) {
//Don't even bother with nodes that have errors
if (that.getErrors() != null && !that.getErrors().isEmpty()) return;
if (!(prototypeStyle && that.getDeclarationModel().isClassOrInterfaceMember())) {
classDefinition(that);
}
}
private void classDefinition(ClassDefinition that) {
Class d = that.getDeclarationModel();
comment(that);
out(function, names.name(d), "(");
for (Parameter p: that.getParameterList().getParameters()) {
p.visit(this);
out(", ");
}
boolean withTargs = that.getTypeParameterList() != null &&
!that.getTypeParameterList().getTypeParameterDeclarations().isEmpty();
if (withTargs) {
out("$$targs$$,");
}
self(d);
out(")");
beginBlock();
//This takes care of top-level attributes defined before the class definition
out("$init$", names.name(d), "();");
endLine();
declareSelf(d);
if (withTargs) {
out(clAlias, "set_type_args(");
self(d); out(",$$targs$$);"); endLine();
} else {
//Check if any of the satisfied types have type arguments
if (that.getSatisfiedTypes() != null) {
boolean first = true;
for(Tree.StaticType sat : that.getSatisfiedTypes().getTypes()) {
Map<TypeParameter,ProducedType> targs = sat.getTypeModel().getTypeArguments();
if (targs != null && !targs.isEmpty()) {
if (first) {
self(d); out(".$$targs$$=");
TypeUtils.printTypeArguments(that, targs, this);
endLine(true);
first = false;
} else {
out("/*TODO: more type arguments*/");
endLine();
}
}
}
}
}
referenceOuter(d);
initParameters(that.getParameterList(), d);
final List<Declaration> superDecs = new ArrayList<Declaration>();
if (!prototypeStyle) {
new SuperVisitor(superDecs).visit(that.getClassBody());
}
callSuperclass(that.getExtendedType(), d, that, superDecs);
callInterfaces(that.getSatisfiedTypes(), d, that, superDecs);
that.getClassBody().visit(this);
returnSelf(d);
endBlockNewLine();
share(d);
typeInitialization(that);
}
private void referenceOuter(TypeDeclaration d) {
if (prototypeStyle && d.isClassOrInterfaceMember()) {
self(d);
out(".");
outerSelf(d);
out("=this;");
endLine();
}
}
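/** Copies getter/setter/member references for the given inherited declarations into the
* subtype's self object, using a suffix derived from the parent scope
* (non-prototype style only). */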
private void copySuperMembers(TypeDeclaration typeDecl, final List<Declaration> decs, ClassOrInterface d) {
if (!prototypeStyle) {
for (Declaration dec: decs) {
if (!typeDecl.isMember(dec)) { continue; }
String suffix = names.scopeSuffix(dec.getContainer());
if (dec instanceof Value) {
superGetterRef(dec,d,suffix);
if (((Value) dec).isVariable()) {
superSetterRef(dec,d,suffix);
}
}
else if (dec instanceof Getter) {
superGetterRef(dec,d,suffix);
if (((Getter) dec).isVariable()) {
superSetterRef(dec,d,suffix);
}
}
else {
superRef(dec,d,suffix);
}
}
}
}
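/** Emits the call to the superclass constructor: the positional arguments, "undefined"
* (or an empty sequence) for omitted defaulted parameters, the supertype's type arguments
* when it is generic, and finally the self reference. Afterwards the inherited members
* collected by SuperVisitor are copied into the subtype. */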
private void callSuperclass(ExtendedType extendedType, Class d, Node that,
final List<Declaration> superDecs) {
if (extendedType!=null) {
PositionalArgumentList argList = extendedType.getInvocationExpression()
.getPositionalArgumentList();
TypeDeclaration typeDecl = extendedType.getType().getDeclarationModel();
qualify(that, typeDecl);
out(memberAccessBase(extendedType.getType(), typeDecl, false, false),
(prototypeStyle && (getSuperMemberScope(extendedType.getType()) != null))
? ".call(this," : "(");
invoker.generatePositionalArguments(argList, argList.getPositionalArguments(), false);
if (argList.getPositionalArguments().size() > 0) {
out(",");
}
//There may be defaulted args we must pass as undefined
if (d.getExtendedTypeDeclaration().getParameterList().getParameters().size() > argList.getPositionalArguments().size()) {
List<com.redhat.ceylon.compiler.typechecker.model.Parameter> superParams = d.getExtendedTypeDeclaration().getParameterList().getParameters();
for (int i = argList.getPositionalArguments().size(); i < superParams.size(); i++) {
com.redhat.ceylon.compiler.typechecker.model.Parameter p = superParams.get(i);
if (p.isSequenced()) {
out(clAlias, "getEmpty(),");
} else {
out("undefined,");
}
}
}
//If the supertype has type arguments, add them to the call
if (typeDecl.getTypeParameters() != null && !typeDecl.getTypeParameters().isEmpty()) {
TypeUtils.printTypeArguments(that, TypeUtils.matchTypeParametersWithArguments(typeDecl.getTypeParameters(),
extendedType.getType().getTypeArgumentList().getTypeModels()), this);
out(",");
}
self(d);
out(");");
endLine();
copySuperMembers(typeDecl, superDecs, d);
}
}
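/** Emits a constructor call for every satisfied interface (resolving aliases first),
* passing the self reference, registers any reified type arguments of the interface, and
* copies the inherited members collected by SuperVisitor. */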
private void callInterfaces(SatisfiedTypes satisfiedTypes, ClassOrInterface d, Node that,
final List<Declaration> superDecs) {
if (satisfiedTypes!=null) {
for (StaticType st: satisfiedTypes.getTypes()) {
TypeDeclaration typeDecl = st.getTypeModel().getDeclaration();
if (typeDecl.isAlias()) {
typeDecl = typeDecl.getExtendedTypeDeclaration();
}
qualify(that, typeDecl);
out(names.name((ClassOrInterface)typeDecl), "(");
self(d);
out(");");
endLine();
//Set the reified types from interfaces
Map<TypeParameter, ProducedType> reifs = st.getTypeModel().getTypeArguments();
if (reifs != null && !reifs.isEmpty()) {
for (Map.Entry<TypeParameter, ProducedType> e : reifs.entrySet()) {
if (e.getValue().getDeclaration() instanceof ClassOrInterface) {
out(clAlias, "add_type_arg(");
self(d);
out(",'", e.getKey().getName(), "',");
TypeUtils.typeNameOrList(that, e.getValue(), this, true);
out(");");
endLine();
}
}
}
copySuperMembers(typeDecl, superDecs, d);
}
}
}
/** Generates a function to initialize the specified type. */
private void typeInitialization(final Tree.Declaration type) {
ExtendedType extendedType = null;
SatisfiedTypes satisfiedTypes = null;
boolean isInterface = false;
ClassOrInterface decl = null;
if (type instanceof ClassDefinition) {
ClassDefinition classDef = (ClassDefinition) type;
extendedType = classDef.getExtendedType();
satisfiedTypes = classDef.getSatisfiedTypes();
decl = classDef.getDeclarationModel();
} else if (type instanceof InterfaceDefinition) {
satisfiedTypes = ((InterfaceDefinition) type).getSatisfiedTypes();
isInterface = true;
decl = ((InterfaceDefinition) type).getDeclarationModel();
} else if (type instanceof ObjectDefinition) {
ObjectDefinition objectDef = (ObjectDefinition) type;
extendedType = objectDef.getExtendedType();
satisfiedTypes = objectDef.getSatisfiedTypes();
decl = (ClassOrInterface)objectDef.getDeclarationModel().getTypeDeclaration();
}
final PrototypeInitCallback callback = new PrototypeInitCallback() {
@Override
public void addToPrototypeCallback() {
if (type instanceof ClassDefinition) {
addToPrototype(((ClassDefinition)type).getDeclarationModel(), ((ClassDefinition)type).getClassBody().getStatements());
} else if (type instanceof InterfaceDefinition) {
addToPrototype(((InterfaceDefinition)type).getDeclarationModel(), ((InterfaceDefinition)type).getInterfaceBody().getStatements());
}
}
};
typeInitialization(extendedType, satisfiedTypes, isInterface, decl, callback);
}
/** Main method for generating the type initialization code.
* @param extendedType The type that is being extended.
* @param satisfiedTypes The types satisfied by the type being initialized.
* @param isInterface Tells whether the type being initialized is an interface
* @param d The declaration for the type being initialized
* @param callback A callback to add something more to the type initializer in prototype style.
*/
private void typeInitialization(ExtendedType extendedType, SatisfiedTypes satisfiedTypes, boolean isInterface,
ClassOrInterface d, PrototypeInitCallback callback) {
//Let's always use initTypeProto to avoid #113
String initFuncName = "initTypeProto";
out("function $init$", names.name(d), "()");
beginBlock();
out("if (", names.name(d), ".$$===undefined)");
beginBlock();
String qns = d.getQualifiedNameString();
if (JsCompiler.compilingLanguageModule && qns.indexOf("::") < 0) {
//Language module files get compiled in the default module,
//so they need this prefix added to their qualified name
qns = "ceylon.language::" + qns;
}
out(clAlias, initFuncName, "(", names.name(d), ",'", qns, "'");
if (extendedType != null) {
String fname = typeFunctionName(extendedType.getType(), false);
out(",", fname);
} else if (!isInterface) {
out(",", clAlias, "Basic");
}
if (satisfiedTypes != null) {
for (StaticType satType : satisfiedTypes.getTypes()) {
TypeDeclaration tdec = satType.getTypeModel().getDeclaration();
if (tdec.isAlias()) {
tdec = tdec.getExtendedTypeDeclaration();
}
String fname = typeFunctionName(satType, true);
//Actually it could be "if not in same module"
if (!JsCompiler.compilingLanguageModule && declaredInCL(tdec)) {
out(",", fname);
} else {
int idx = fname.lastIndexOf('.');
if (idx > 0) {
fname = fname.substring(0, idx+1) + "$init$" + fname.substring(idx+1);
} else {
fname = "$init$" + fname;
}
out(",", fname, "()");
}
}
}
out(");");
//The class definition needs to be inside the init function if we want forward declarations to work in prototype style
if (prototypeStyle) {
endLine();
callback.addToPrototypeCallback();
}
endBlockNewLine();
out("return ", names.name(d), ";");
endBlockNewLine();
//If it's nested, share the init function
if (outerSelf(d)) {
out(".$init$", names.name(d), "=$init$", names.name(d), ";");
endLine();
}
out("$init$", names.name(d), "();");
endLine();
}
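/** Returns the (possibly qualified) name of the constructor function for the given type,
* optionally resolving type aliases to the aliased declaration. */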
private String typeFunctionName(StaticType type, boolean removeAlias) {
TypeDeclaration d = type.getTypeModel().getDeclaration();
if (removeAlias && d.isAlias()) {
d = d.getExtendedTypeDeclaration();
}
boolean inProto = prototypeStyle
&& (type.getScope().getContainer() instanceof TypeDeclaration);
String constr = qualifiedPath(type, d, inProto);
if (constr.length() > 0) {
constr += '.';
}
constr += memberAccessBase(type, d, false, false);
return constr;
}
private void addToPrototype(ClassOrInterface d, List<Statement> statements) {
if (prototypeStyle && !statements.isEmpty()) {
final List<? extends Statement> prevStatements = currentStatements;
currentStatements = statements;
out("(function(", names.self(d), ")");
beginBlock();
for (Statement s: statements) {
addToPrototype(d, s);
}
endBlock();
out(")(", names.name(d), ".$$.prototype);");
endLine();
currentStatements = prevStatements;
}
}
private ClassOrInterface prototypeOwner;
private void addToPrototype(ClassOrInterface d, Statement s) {
ClassOrInterface oldPrototypeOwner = prototypeOwner;
prototypeOwner = d;
if (s instanceof MethodDefinition) {
addMethodToPrototype(d, (MethodDefinition)s);
} else if (s instanceof MethodDeclaration) {
methodDeclaration(d, (MethodDeclaration) s);
} else if (s instanceof AttributeGetterDefinition) {
addGetterToPrototype(d, (AttributeGetterDefinition)s);
} else if (s instanceof AttributeDeclaration) {
addGetterAndSetterToPrototype(d, (AttributeDeclaration) s);
} else if (s instanceof ClassDefinition) {
addClassToPrototype(d, (ClassDefinition) s);
} else if (s instanceof InterfaceDefinition) {
addInterfaceToPrototype(d, (InterfaceDefinition) s);
} else if (s instanceof ObjectDefinition) {
addObjectToPrototype(d, (ObjectDefinition) s);
} else if (s instanceof ClassDeclaration) {
addClassDeclarationToPrototype(d, (ClassDeclaration) s);
} else if (s instanceof InterfaceDeclaration) {
addInterfaceDeclarationToPrototype(d, (InterfaceDeclaration) s);
} else if (s instanceof SpecifierStatement) {
addSpecifierToPrototype(d, (SpecifierStatement) s);
}
prototypeOwner = oldPrototypeOwner;
}
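/** Emits code that lazily creates the self object for a class or interface:
* "if (<self>===undefined)<self>=new <name>.$$;" (qualified with "this." for
* prototype-style members). */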
private void declareSelf(ClassOrInterface d) {
out("if (");
self(d);
out("===undefined)");
self(d);
out("=new ");
if (prototypeStyle && d.isClassOrInterfaceMember()) {
out("this.", names.name(d), ".$$;");
} else {
out(names.name(d), ".$$;");
}
endLine();
/*out("var ");
self(d);
out("=");
self();
out(";");
endLine();*/
}
private void instantiateSelf(ClassOrInterface d) {
out("var ");
self(d);
out("=new ");
if (prototypeStyle && d.isClassOrInterfaceMember()) {
out("this.", names.name(d), ".$$;");
} else {
out(names.name(d), ".$$;");
}
endLine();
}
private void returnSelf(ClassOrInterface d) {
out("return ");
self(d);
out(";");
}
private void addObjectToPrototype(ClassOrInterface type, ObjectDefinition objDef) {
objectDefinition(objDef);
Value d = objDef.getDeclarationModel();
Class c = (Class) d.getTypeDeclaration();
out(names.self(type), ".", names.name(c), "=", names.name(c), ";");
endLine();
}
@Override
public void visit(ObjectDefinition that) {
//Don't even bother with nodes that have errors
if (that.getErrors() != null && !that.getErrors().isEmpty()) return;
Value d = that.getDeclarationModel();
if (!(prototypeStyle && d.isClassOrInterfaceMember())) {
objectDefinition(that);
} else {
Class c = (Class) d.getTypeDeclaration();
comment(that);
outerSelf(d);
out(".", names.name(d), "=");
outerSelf(d);
out(".", names.name(c), "();");
endLine();
}
}
private void objectDefinition(ObjectDefinition that) {
comment(that);
Value d = that.getDeclarationModel();
boolean addToPrototype = prototypeStyle && d.isClassOrInterfaceMember();
Class c = (Class) d.getTypeDeclaration();
out(function, names.name(c));
Map<TypeParameter, ProducedType> targs=new HashMap<TypeParameter, ProducedType>();
if (that.getSatisfiedTypes() != null) {
for (StaticType st : that.getSatisfiedTypes().getTypes()) {
Map<TypeParameter, ProducedType> stargs = st.getTypeModel().getTypeArguments();
if (stargs != null && !stargs.isEmpty()) {
targs.putAll(stargs);
}
}
}
out(targs.isEmpty()?"()":"($$targs$$)");
beginBlock();
instantiateSelf(c);
referenceOuter(c);
final List<Declaration> superDecs = new ArrayList<Declaration>();
if (!prototypeStyle) {
new SuperVisitor(superDecs).visit(that.getClassBody());
}
if (!targs.isEmpty()) {
self(c); out(".$$targs$$=$$targs$$;"); endLine();
}
callSuperclass(that.getExtendedType(), c, that, superDecs);
callInterfaces(that.getSatisfiedTypes(), c, that, superDecs);
that.getClassBody().visit(this);
returnSelf(c);
indentLevel--;
endLine();
out("}");
endLine();
typeInitialization(that);
addToPrototype(c, that.getClassBody().getStatements());
if (!addToPrototype) {
out("var ", names.name(d), "=", names.name(c), "(");
if (!targs.isEmpty()) {
TypeUtils.printTypeArguments(that, targs, this);
}
out(");");
endLine();
}
if (!defineAsProperty(d)) {
out("var ", names.getter(d), "=function()");
beginBlock();
out("return ");
if (addToPrototype) {
out("this.");
}
out(names.name(d), ";");
endBlockNewLine();
if (addToPrototype || d.isShared()) {
outerSelf(d);
out(".", names.getter(d), "=", names.getter(d), ";");
endLine();
}
}
}
private void superRef(Declaration d, ClassOrInterface sub, String parentSuffix) {
//if (d.isActual()) {
self(sub);
out(".", names.name(d), parentSuffix, "=");
self(sub);
out(".", names.name(d), ";");
endLine();
//}
}
private void superGetterRef(Declaration d, ClassOrInterface sub, String parentSuffix) {
if (defineAsProperty(d)) {
out(clAlias, "copySuperAttr(", names.self(sub), ",'", names.name(d), "','",
parentSuffix, "');");
}
else {
self(sub);
out(".", names.getter(d), parentSuffix, "=");
self(sub);
out(".", names.getter(d), ";");
}
endLine();
}
private void superSetterRef(Declaration d, ClassOrInterface sub, String parentSuffix) {
if (!defineAsProperty(d)) {
self(sub);
out(".", names.setter(d), parentSuffix, "=");
self(sub);
out(".", names.setter(d), ";");
endLine();
}
}
@Override
public void visit(MethodDeclaration that) {
//Don't even bother with nodes that have errors
if (that.getErrors() != null && !that.getErrors().isEmpty()) return;
methodDeclaration(null, that);
}
private void methodDeclaration(TypeDeclaration outer, MethodDeclaration that) {
Method m = that.getDeclarationModel();
if (that.getSpecifierExpression() != null) {
// method(params) => expr
if (outer == null) {
// Not in a prototype definition. Nothing to do here if it's a
// member in prototype style.
if (prototypeStyle && m.isMember()) { return; }
comment(that);
out("var ");
}
else {
// prototype definition
comment(that);
out(names.self(outer), ".");
}
out(names.name(m), "=");
singleExprFunction(that.getParameterLists(),
that.getSpecifierExpression().getExpression(), that.getScope());
endLine(true);
share(m);
}
else if (outer == null) { // don't do the following in a prototype definition
//Check for refinement of simple param declaration
if (m == that.getScope()) {
if (m.getContainer() instanceof Class && m.isClassOrInterfaceMember()) {
//Declare the method just by pointing to the param function
final String name = names.name(((Class)m.getContainer()).getParameter(m.getName()));
if (name != null) {
self((Class)m.getContainer());
out(".", names.name(m), "=", name, ";");
endLine();
}
} else if (m.getContainer() instanceof Method) {
//Declare the function just by forcing the name we used in the param list
final String name = names.name(((Method)m.getContainer()).getParameter(m.getName()));
if (name != null) {
names.forceName(m, name);
}
}
}
}
}
@Override
public void visit(MethodDefinition that) {
Method d = that.getDeclarationModel();
//Don't even bother with nodes that have errors
if (that.getErrors() != null && !that.getErrors().isEmpty()) return;
if (!((prototypeStyle && that.getDeclarationModel().isClassOrInterfaceMember()) || isNative(d))) {
comment(that);
methodDefinition(that);
}
}
private void methodDefinition(MethodDefinition that) {
Method d = that.getDeclarationModel();
if (that.getParameterLists().size() == 1) {
out(function, names.name(d));
ParameterList paramList = that.getParameterLists().get(0);
paramList.visit(this);
beginBlock();
initSelf(that.getBlock());
initParameters(paramList, null);
visitStatements(that.getBlock().getStatements());
endBlock();
} else {
int count=0;
for (ParameterList paramList : that.getParameterLists()) {
if (count==0) {
out(function, names.name(d));
} else {
out("return function");
}
paramList.visit(this);
beginBlock();
initSelf(that.getBlock());
initParameters(paramList, null);
count++;
}
visitStatements(that.getBlock().getStatements());
for (int i=0; i < count; i++) {
endBlock();
}
}
if (!share(d)) { out(";"); }
}
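/** Emits initialization code for a parameter list: defaulted and sequenced parameters are
* given their default value (or an empty sequence) when undefined, and captured parameters
* are copied into the type's self object. */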
private void initParameters(ParameterList params, TypeDeclaration typeDecl) {
for (final Parameter param : params.getParameters()) {
com.redhat.ceylon.compiler.typechecker.model.Parameter pd = param.getDeclarationModel();
/*if (param instanceof ValueParameterDeclaration && ((ValueParameterDeclaration)param).getDeclarationModel().isHidden()) {
//TODO support new syntax for class and method parameters
//the declaration is actually different from the one we usually use
out("//HIDDEN! ", pd.getName(), "(", names.name(pd), ")"); endLine();
}*/
String paramName = names.name(pd);
if (param.getDefaultArgument() != null || pd.isSequenced()) {
out("if(", paramName, "===undefined){", paramName, "=");
if (param.getDefaultArgument() == null) {
out(clAlias, "getEmpty()");
} else {
final SpecifierExpression defaultExpr =
param.getDefaultArgument().getSpecifierExpression();
if ((param instanceof FunctionalParameterDeclaration)
&& (defaultExpr instanceof LazySpecifierExpression)) {
// function parameter defaulted using "=>"
singleExprFunction(
((FunctionalParameterDeclaration) param).getParameterLists(),
defaultExpr.getExpression(), null);
}
else {
defaultExpr.visit(this);
}
}
out(";}");
endLine();
}
if ((typeDecl != null) && pd.isCaptured()) {
self(typeDecl);
out(".", paramName, "=", paramName, ";");
endLine();
}
}
}
private void addMethodToPrototype(TypeDeclaration outer,
MethodDefinition that) {
Method d = that.getDeclarationModel();
if (!prototypeStyle||!d.isClassOrInterfaceMember()) return;
comment(that);
out(names.self(outer), ".", names.name(d), "=");
methodDefinition(that);
}
@Override
public void visit(AttributeGetterDefinition that) {
Getter d = that.getDeclarationModel();
if (prototypeStyle&&d.isClassOrInterfaceMember()) return;
comment(that);
if (defineAsProperty(d)) {
out(clAlias, "defineAttr(");
outerSelf(d);
out(",'", names.name(d), "',function()");
super.visit(that);
final AttributeSetterDefinition setterDef = associatedSetterDefinition(that);
if (setterDef != null) {
out(",function(", names.name(setterDef.getDeclarationModel().getParameter()), ")");
super.visit(setterDef);
}
out(");");
}
else {
out("var ", names.getter(d), "=function()");
super.visit(that);
if (!shareGetter(d)) { out(";"); }
}
}
private void addGetterToPrototype(TypeDeclaration outer,
AttributeGetterDefinition that) {
Getter d = that.getDeclarationModel();
if (!prototypeStyle||!d.isClassOrInterfaceMember()) return;
comment(that);
out(clAlias, "defineAttr(", names.self(outer), ",'", names.name(d),
"',function()");
super.visit(that);
final AttributeSetterDefinition setterDef = associatedSetterDefinition(that);
if (setterDef != null) {
out(",function(", names.name(setterDef.getDeclarationModel().getParameter()), ")");
super.visit(setterDef);
}
out(");");
}
private AttributeSetterDefinition associatedSetterDefinition(
AttributeGetterDefinition getterDef) {
final Setter setter = getterDef.getDeclarationModel().getSetter();
if ((setter != null) && (currentStatements != null)) {
for (Statement stmt : currentStatements) {
if (stmt instanceof AttributeSetterDefinition) {
final AttributeSetterDefinition setterDef =
(AttributeSetterDefinition) stmt;
if (setterDef.getDeclarationModel() == setter) {
return setterDef;
}
}
}
}
return null;
}
/** Exports a getter function; useful in non-prototype style. */
private boolean shareGetter(MethodOrValue d) {
boolean shared = false;
if (isCaptured(d)) {
beginNewLine();
outerSelf(d);
out(".", names.getter(d), "=", names.getter(d), ";");
endLine();
shared = true;
}
return shared;
}
@Override
public void visit(AttributeSetterDefinition that) {
Setter d = that.getDeclarationModel();
if ((prototypeStyle&&d.isClassOrInterfaceMember()) || defineAsProperty(d)) return;
comment(that);
out("var ", names.setter(d.getGetter()), "=function(", names.name(d.getParameter()), ")");
super.visit(that);
if (!shareSetter(d)) { out(";"); }
}
private boolean isCaptured(Declaration d) {
if (d.isToplevel()||d.isClassOrInterfaceMember()) { //TODO: what about things nested inside control structures
if (d.isShared() || d.isCaptured() ) {
return true;
}
else {
OuterVisitor ov = new OuterVisitor(d);
ov.visit(root);
return ov.found;
}
}
else {
return false;
}
}
private boolean shareSetter(MethodOrValue d) {
boolean shared = false;
if (isCaptured(d)) {
beginNewLine();
outerSelf(d);
out(".", names.setter(d), "=", names.setter(d), ";");
endLine();
shared = true;
}
return shared;
}
@Override
public void visit(AttributeDeclaration that) {
Value d = that.getDeclarationModel();
//Check if the attribute corresponds to a class parameter
//This is because of the new initializer syntax
String classParam = null;
if (d.getContainer() instanceof Functional) {
classParam = names.name(((Functional)d.getContainer()).getParameter(d.getName()));
}
if (!d.isFormal()) {
comment(that);
SpecifierOrInitializerExpression specInitExpr =
that.getSpecifierOrInitializerExpression();
if (prototypeStyle && d.isClassOrInterfaceMember()) {
if ((specInitExpr != null)
&& !(specInitExpr instanceof LazySpecifierExpression)) {
outerSelf(d);
out(".", names.name(d), "=");
super.visit(that);
endLine(true);
} else if (classParam != null) {
outerSelf(d);
out(".", names.name(d), "=", classParam);
endLine(true);
}
//TODO generate for => expr when no classParam is available
}
else if (specInitExpr instanceof LazySpecifierExpression) {
final boolean property = defineAsProperty(d);
if (property) {
out(clAlias, "defineAttr(");
outerSelf(d);
out(",'", names.name(d), "',function(){ return ");
} else {
out("var ", names.getter(d), "=function(){return ");
}
int boxType = boxStart(specInitExpr.getExpression().getTerm());
specInitExpr.getExpression().visit(this);
boxUnboxEnd(boxType);
out(";}");
if (property) {
out(");");
endLine();
} else {
endLine(true);
shareGetter(d);
}
}
else {
if ((specInitExpr != null) || (classParam != null) || !d.isMember()
|| d.isVariable()) {
generateAttributeGetter(d, specInitExpr, classParam);
}
if (d.isVariable() && !defineAsProperty(d)) {
final String varName = names.name(d);
String paramVarName = names.createTempVariable(d.getName());
out("var ", names.setter(d), "=function(", paramVarName, "){return ");
out(varName, "=", paramVarName, ";};");
endLine();
shareSetter(d);
}
}
}
}
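/** Emits the variable that holds an attribute's value, initialized from the given
* specifier expression or class parameter. Captured attributes additionally get a getter
* (and setter, if variable) as a defineAttr property or as plain functions; non-captured
* attributes are simply marked for direct access. */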
private void generateAttributeGetter(MethodOrValue decl,
SpecifierOrInitializerExpression expr, String param) {
final String varName = names.name(decl);
out("var ", varName);
if (expr != null) {
out("=");
int boxType = boxStart(expr.getExpression().getTerm());
if (dynblock > 0 && TypeUtils.isUnknown(expr.getExpression().getTypeModel()) && !TypeUtils.isUnknown(decl.getType())) {
TypeUtils.generateDynamicCheck(expr.getExpression(), decl.getType(), this);
} else {
expr.visit(this);
}
boxUnboxEnd(boxType);
} else if (param != null) {
out("=", param);
}
endLine(true);
if (decl instanceof Method) {
if (decl.isClassOrInterfaceMember() && isCaptured(decl)) {
beginNewLine();
outerSelf(decl);
out(".", names.name(decl), "=", names.name(decl), ";");
endLine();
}
} else {
if (isCaptured(decl)) {
if (defineAsProperty(decl)) {
out(clAlias, "defineAttr(");
outerSelf(decl);
out(",'", varName, "',function(){return ", varName, ";}");
if (decl.isVariable()) {
final String par = names.createTempVariable(decl.getName());
out(",function(", par, "){return ", varName, "=", par, ";}");
}
out(");");
endLine();
}
else {
if (decl.isMember()) {
out("delete ");
outerSelf(decl);
out(".", varName);
endLine(true);
}
out("var ", names.getter(decl),"=function(){return ", varName, ";};");
endLine();
shareGetter(decl);
}
} else {
directAccess.add(decl);
}
}
}
private void addGetterAndSetterToPrototype(TypeDeclaration outer,
AttributeDeclaration that) {
Value d = that.getDeclarationModel();
if (!prototypeStyle||d.isToplevel()) return;
if (!d.isFormal()) {
comment(that);
String classParam = null;
if (d.getContainer() instanceof Functional) {
classParam = names.name(((Functional)d.getContainer()).getParameter(d.getName()));
}
if ((that.getSpecifierOrInitializerExpression() != null) || d.isVariable()
|| (classParam != null)) {
if (that.getSpecifierOrInitializerExpression()
instanceof LazySpecifierExpression) {
// attribute is defined by a lazy expression ("=>" syntax)
out(clAlias, "defineAttr(", names.self(outer), ",'", names.name(d),
"',function()");
beginBlock();
initSelf(that.getScope());
out("return ");
Expression expr = that.getSpecifierOrInitializerExpression().getExpression();
int boxType = boxStart(expr.getTerm());
expr.visit(this);
boxUnboxEnd(boxType);
endBlock();
out(")");
endLine(true);
}
else if (d.isActual()) {
out("delete ", names.self(outer), ".", names.name(d));
endLine(true);
}
}
}
}
@Override
public void visit(CharLiteral that) {
out(clAlias, "Character(");
out(String.valueOf(that.getText().codePointAt(1)));
out(")");
}
/** Escapes the special characters of a string literal so that it can be emitted between double quotes. */
String escapeStringLiteral(String s) {
StringBuilder text = new StringBuilder(s);
//Escape special chars
for (int i=0; i < text.length();i++) {
switch(text.charAt(i)) {
case 8:text.replace(i, i+1, "\\b"); i++; break;
case 9:text.replace(i, i+1, "\\t"); i++; break;
case 10:text.replace(i, i+1, "\\n"); i++; break;
case 12:text.replace(i, i+1, "\\f"); i++; break;
case 13:text.replace(i, i+1, "\\r"); i++; break;
case 34:text.replace(i, i+1, "\\\""); i++; break;
case 39:text.replace(i, i+1, "\\'"); i++; break;
case 92:text.replace(i, i+1, "\\\\"); i++; break;
}
}
return text.toString();
}
@Override
public void visit(StringLiteral that) {
final int slen = that.getText().codePointCount(0, that.getText().length());
if (JsCompiler.compilingLanguageModule) {
out("String$(\"", escapeStringLiteral(that.getText()), "\",", Integer.toString(slen), ")");
} else {
out(clAlias, "String(\"", escapeStringLiteral(that.getText()), "\",", Integer.toString(slen), ")");
}
}
@Override
public void visit(StringTemplate that) {
List<StringLiteral> literals = that.getStringLiterals();
List<Expression> exprs = that.getExpressions();
out(clAlias, "StringBuilder().appendAll([");
boolean first = true;
for (int i = 0; i < literals.size(); i++) {
StringLiteral literal = literals.get(i);
if (!literal.getText().isEmpty()) {
if (!first) { out(","); }
first = false;
literal.visit(this);
}
if (i < exprs.size()) {
if (!first) { out(","); }
first = false;
exprs.get(i).visit(this);
out(".string");
}
}
out("]).string");
}
@Override
public void visit(FloatLiteral that) {
out(clAlias, "Float(", that.getText(), ")");
}
@Override
public void visit(NaturalLiteral that) {
char prefix = that.getText().charAt(0);
if (prefix == '$' || prefix == '#') {
int radix= prefix == '$' ? 2 : 16;
try {
out("(", new java.math.BigInteger(that.getText().substring(1), radix).toString(), ")");
} catch (NumberFormatException ex) {
that.addError("Invalid numeric literal " + that.getText());
}
} else {
out("(", that.getText(), ")");
}
}
@Override
public void visit(This that) {
self(Util.getContainingClassOrInterface(that.getScope()));
}
@Override
public void visit(Super that) {
self(Util.getContainingClassOrInterface(that.getScope()));
}
@Override
public void visit(Outer that) {
if (prototypeStyle) {
Scope scope = that.getScope();
while ((scope != null) && !(scope instanceof TypeDeclaration)) {
scope = scope.getContainer();
}
if (scope != null && ((TypeDeclaration)scope).isClassOrInterfaceMember()) {
self((TypeDeclaration) scope);
out(".");
}
}
self(that.getTypeModel().getDeclaration());
}
@Override
public void visit(BaseMemberExpression that) {
if (that.getErrors() != null && !that.getErrors().isEmpty()) {
//Don't even bother processing a node with errors
return;
}
Declaration decl = that.getDeclaration();
if (decl != null) {
String name = decl.getName();
String pkgName = decl.getUnit().getPackage().getQualifiedNameString();
// map Ceylon true/false/null directly to JS true/false/null
if ("ceylon.language".equals(pkgName)) {
if ("true".equals(name) || "false".equals(name) || "null".equals(name)) {
out(name);
return;
}
}
}
out(memberAccess(that));
}
private boolean accessDirectly(Declaration d) {
return !accessThroughGetter(d) || directAccess.contains(d);
}
private boolean accessThroughGetter(Declaration d) {
return (d instanceof MethodOrValue) && !(d instanceof Method)
&& !defineAsProperty(d);
}
private boolean defineAsProperty(Declaration d) {
// for now, only define member attributes as properties, not toplevel attributes
return d.isMember() && (d instanceof MethodOrValue) && !(d instanceof Method);
}
/** Returns true if the top-level declaration for the term is annotated "nativejs" */
private static boolean isNative(Term t) {
if (t instanceof MemberOrTypeExpression) {
return isNative(((MemberOrTypeExpression)t).getDeclaration());
}
return false;
}
/** Returns true if the declaration is annotated "nativejs" or is considered unknown by TypeUtils. */
private static boolean isNative(Declaration d) {
return hasAnnotationByName(getToplevel(d), "nativejs") || TypeUtils.isUnknown(d);
}
private static Declaration getToplevel(Declaration d) {
while (d != null && !d.isToplevel()) {
Scope s = d.getContainer();
// Skip any non-declaration elements
while (s != null && !(s instanceof Declaration)) {
s = s.getContainer();
}
d = (Declaration) s;
}
return d;
}
private static boolean hasAnnotationByName(Declaration d, String name){
if (d != null) {
for(com.redhat.ceylon.compiler.typechecker.model.Annotation annotation : d.getAnnotations()){
if(annotation.getName().equals(name))
return true;
}
}
return false;
}
private void generateSafeOp(QualifiedMemberOrTypeExpression that) {
boolean isMethod = that.getDeclaration() instanceof Method;
String lhsVar = createRetainedTempVar("opt");
out("(", lhsVar, "=");
super.visit(that);
out(",");
if (isMethod) {
out(clAlias, "JsCallable(", lhsVar, ",");
}
out(lhsVar, "!==null?", lhsVar, ".", memberAccess(that), ":null)");
if (isMethod) {
out(")");
}
}
@Override
public void visit(QualifiedMemberExpression that) {
//Big TODO: make sure the member is actually
// refined by the current class!
if (that.getMemberOperator() instanceof SafeMemberOp) {
generateSafeOp(that);
} else if (that.getMemberOperator() instanceof SpreadOp) {
generateSpread(that);
} else if (that.getDeclaration() instanceof Method && that.getSignature() == null) {
//TODO right now this causes that all method invocations are done this way
//we need to filter somehow to only use this pattern when the result is supposed to be a callable
//looks like checking for signature is a good way (not THE way though; named arg calls don't have signature)
generateCallable(that, null);
} else {
super.visit(that);
out(".", memberAccess(that));
}
}
/** A spread operation cannot be generated as a simple function call because the object's members must be referenced directly, so it is emitted as an immediately invoked function. */
private void generateSpread(QualifiedMemberOrTypeExpression that) {
//Determine if it's a method or attribute
boolean isMethod = that.getDeclaration() instanceof Method;
//Define a function
out("(function()");
beginBlock();
if (comment) {
out("//SpreadOp at ", that.getLocation());
endLine();
}
//Declare an array to store the values/references
String tmplist = names.createTempVariable("lst");
out("var ", tmplist, "=[];"); endLine();
//Get an iterator
String iter = names.createTempVariable("it");
out("var ", iter, "=");
super.visit(that);
out(".iterator;"); endLine();
//Iterate
String elem = names.createTempVariable("elem");
out("var ", elem, ";"); endLine();
out("while ((", elem, "=", iter, ".next())!==", clAlias, "getFinished())");
beginBlock();
//Add value or reference to the array
out(tmplist, ".push(");
if (isMethod) {
out("{o:", elem, ", f:", elem, ".", memberAccess(that), "}");
} else {
out(elem, ".", memberAccess(that));
}
out(");");
endBlockNewLine();
//Gather arguments to pass to the callable
//Return the array of values or a Callable with the arguments
out("return ", clAlias);
if (isMethod) {
out("JsCallableList(", tmplist, ");");
} else {
out("ArraySequence(", tmplist, ");");
}
endBlock();
out("())");
}
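/** Wraps a qualified member reference in a JsCallable so it can be passed around as a
* function value; the primary expression is evaluated only once, into a retained temp
* variable, and a null primary yields a null callable target. */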
private void generateCallable(QualifiedMemberOrTypeExpression that, String name) {
String primaryVar = createRetainedTempVar("opt");
out("(", primaryVar, "=");
that.getPrimary().visit(this);
out(",", clAlias, "JsCallable(", primaryVar, ",", primaryVar, "!==null?",
primaryVar, ".", (name == null) ? memberAccess(that) : name, ":null))");
}
/**
* Checks if the given node is a MemberOrTypeExpression or QualifiedType which
* represents an access to a supertype member and returns the scope of that
* member or null.
*/
Scope getSuperMemberScope(Node node) {
Scope scope = null;
if (node instanceof BaseMemberOrTypeExpression) {
// Check for "Supertype::member"
BaseMemberOrTypeExpression bmte = (BaseMemberOrTypeExpression) node;
if (bmte.getSupertypeQualifier() != null) {
scope = bmte.getDeclaration().getContainer();
}
}
else if (node instanceof QualifiedMemberOrTypeExpression) {
// Check for "super.member"
QualifiedMemberOrTypeExpression qmte = (QualifiedMemberOrTypeExpression) node;
if (qmte.getPrimary() instanceof Super) {
scope = qmte.getDeclaration().getContainer();
}
}
else if (node instanceof QualifiedType) {
// Check for super.Membertype
QualifiedType qtype = (QualifiedType) node;
if (qtype.getOuterType() instanceof SuperType) {
scope = qtype.getDeclarationModel().getContainer();
}
}
return scope;
}
private String memberAccessBase(Node node, Declaration decl, boolean setter,
boolean qualifyBaseExpr) {
StringBuilder sb = new StringBuilder();
if (qualifyBaseExpr && (node instanceof BaseMemberOrTypeExpression)) {
BaseMemberOrTypeExpression bmte = (BaseMemberOrTypeExpression) node;
String path = qualifiedPath(node, bmte.getDeclaration());
if (path.length() > 0) {
sb.append(path);
sb.append(".");
}
}
Scope scope = getSuperMemberScope(node);
if (prototypeStyle && (scope != null)) {
sb.append("getT$all()['");
sb.append(scope.getQualifiedNameString());
sb.append("']");
if (defineAsProperty(decl)) {
return clAlias + (setter ? "attrSetter(" : "attrGetter(")
+ sb.toString() + ",'" + names.name(decl) + "')";
}
sb.append(".$$.prototype.");
}
final String member = (accessThroughGetter(decl) && !accessDirectly(decl))
? (setter ? names.setter(decl) : names.getter(decl)) : names.name(decl);
sb.append(member);
if (!prototypeStyle && (scope != null)) {
sb.append(names.scopeSuffix(scope));
}
//When compiling the language module we need to modify certain base type names
String rval = sb.toString();
if (TypeUtils.isReservedTypename(rval)) {
rval = sb.append("$").toString();
}
return rval;
}
/**
* Returns a string representing a read access to a member, as represented by
* the given expression. If the expression is a QualifiedMemberOrTypeExpression
* then the LHS is *not* included. If it is a BaseMemberOrTypeExpression and
* qualifyBaseExpr==true then the qualified path is included.
*/
private String memberAccess(StaticMemberOrTypeExpression expr, boolean qualifyBaseExpr) {
Declaration decl = expr.getDeclaration();
if (decl == null && dynblock > 0) {
return expr.getIdentifier().getText();
}
if (isNative(decl)) {
// direct access to a native element
return decl.getName();
}
boolean protoCall = prototypeStyle && (getSuperMemberScope(expr) != null);
if (accessDirectly(decl) && !(protoCall && defineAsProperty(decl))) {
// direct access, without getter
return memberAccessBase(expr, decl, false, qualifyBaseExpr);
}
// access through getter
return memberAccessBase(expr, decl, false, qualifyBaseExpr)
+ (protoCall ? ".call(this)" : "()");
}
private String memberAccess(StaticMemberOrTypeExpression expr) {
return memberAccess(expr, true);
}
private static interface MemberAccessCallback {
public void generateValue();
}
/**
* Generates a write access to a member, as represented by the given expression.
* The given callback is responsible for generating the assigned value.
* If the expression is a QualifiedMemberOrTypeExpression then the
* LHS is *not* included. If it is a BaseMemberOrTypeExpression and
* qualifyBaseExpr==true then the qualified path is included.
*/
private void generateMemberAccess(StaticMemberOrTypeExpression expr,
MemberAccessCallback callback, boolean qualifyBaseExpr) {
Declaration decl = expr.getDeclaration();
boolean paren = false;
if (decl == null && dynblock > 0) {
out(expr.getIdentifier().getText(), "=");
} else if (isNative(decl)) {
// direct access to a native element
out(decl.getName(), "=");
}
else {
boolean protoCall = prototypeStyle && (getSuperMemberScope(expr) != null);
if (accessDirectly(decl) && !(protoCall && defineAsProperty(decl))) {
// direct access, without setter
out(memberAccessBase(expr, decl, true, qualifyBaseExpr), "=");
}
else {
// access through setter
out(memberAccessBase(expr, decl, true, qualifyBaseExpr),
protoCall ? ".call(this," : "(");
paren = true;
}
}
callback.generateValue();
if (paren) { out(")"); }
}
private void generateMemberAccess(StaticMemberOrTypeExpression expr, final String strValue,
boolean qualifyBaseExpr) {
generateMemberAccess(expr, new MemberAccessCallback() {
@Override public void generateValue() { out(strValue); }
}, qualifyBaseExpr);
}
@Override
public void visit(BaseTypeExpression that) {
if (that.getErrors() != null && !that.getErrors().isEmpty()) return;
qualify(that, that.getDeclaration());
out(names.name(that.getDeclaration()));
}
@Override
public void visit(QualifiedTypeExpression that) {
if (that.getMemberOperator() instanceof SafeMemberOp) {
generateCallable(that, names.name(that.getDeclaration()));
} else {
super.visit(that);
out(".", names.name(that.getDeclaration()));
}
}
public void visit(Dynamic that) {
//this is value{xxx}
invoker.nativeObject(that.getNamedArgumentList());
}
@Override
public void visit(InvocationExpression that) {
invoker.generateInvocation(that);
}
@Override
public void visit(PositionalArgumentList that) {
invoker.generatePositionalArguments(that, that.getPositionalArguments(), false);
}
/** Box a term, visit it, unbox it. */
private void box(Term term) {
final int t = boxStart(term);
term.visit(this);
boxUnboxEnd(t);
}
// Box fromTerm as a Ceylon value when necessary (there is no target term here)
private int boxStart(Term fromTerm) {
boolean fromNative = isNative(fromTerm);
boolean toNative = false;
ProducedType fromType = fromTerm.getTypeModel();
return boxUnboxStart(fromNative, fromType, toNative);
}
// Make sure fromTerm is compatible with toTerm by boxing or unboxing it when necessary
int boxUnboxStart(Term fromTerm, Term toTerm) {
boolean fromNative = isNative(fromTerm);
boolean toNative = isNative(toTerm);
ProducedType fromType = fromTerm.getTypeModel();
return boxUnboxStart(fromNative, fromType, toNative);
}
// Make sure fromTerm is compatible with toDecl by boxing or unboxing it when necessary
int boxUnboxStart(Term fromTerm, com.redhat.ceylon.compiler.typechecker.model.TypedDeclaration toDecl) {
boolean fromNative = isNative(fromTerm);
boolean toNative = isNative(toDecl);
ProducedType fromType = fromTerm.getTypeModel();
return boxUnboxStart(fromNative, fromType, toNative);
}
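/** Decides whether a boxing or unboxing conversion is needed and emits its opening part.
* Returns 0 for no conversion, 1 when a wrapper call was opened (closed later with ")"),
* 2 when ".valueOf()" must be appended, and 3 for other native conversions; the matching
* closing code is emitted by boxUnboxEnd(). */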
int boxUnboxStart(boolean fromNative, ProducedType fromType, boolean toNative) {
if (fromNative != toNative) {
// Box the value
String fromTypeName = TypeUtils.isUnknown(fromType) ? "UNKNOWN" : fromType.getProducedTypeQualifiedName();
if (fromNative) {
// conversion from native value to Ceylon value
if (fromTypeName.equals("ceylon.language::String")) {
if (JsCompiler.compilingLanguageModule) {
out("String$(");
} else {
out(clAlias, "String(");
}
} else if (fromTypeName.equals("ceylon.language::Integer")) {
out("(");
} else if (fromTypeName.equals("ceylon.language::Float")) {
out(clAlias, "Float(");
} else if (fromTypeName.equals("ceylon.language::Boolean")) {
out("(");
} else if (fromTypeName.equals("ceylon.language::Character")) {
out(clAlias, "Character(");
} else {
return 0;
}
return 1;
} else if ("ceylon.language::String".equals(fromTypeName)
|| "ceylon.language::Float".equals(fromTypeName)) {
// conversion from Ceylon String or Float to native value
return 2;
} else {
return 3;
}
}
return 0;
}
void boxUnboxEnd(int boxType) {
switch (boxType) {
case 1: out(")"); break;
case 2: out(".valueOf()"); break;
default: //nothing
}
}
@Override
public void visit(ObjectArgument that) {
//Don't even bother with nodes that have errors
if (that.getErrors() != null && !that.getErrors().isEmpty()) return;
final Class c = (Class)that.getDeclarationModel().getTypeDeclaration();
out("(function()");
beginBlock();
out("//ObjectArgument ", that.getIdentifier().getText());
location(that);
endLine();
out(function, names.name(c), "()");
beginBlock();
instantiateSelf(c);
referenceOuter(c);
ExtendedType xt = that.getExtendedType();
final ClassBody body = that.getClassBody();
SatisfiedTypes sts = that.getSatisfiedTypes();
final List<Declaration> superDecs = new ArrayList<Declaration>();
if (!prototypeStyle) {
new SuperVisitor(superDecs).visit(that.getClassBody());
}
callSuperclass(xt, c, that, superDecs);
callInterfaces(sts, c, that, superDecs);
body.visit(this);
returnSelf(c);
indentLevel--;
endLine();
out("}");
endLine();
typeInitialization(xt, sts, false, c, new PrototypeInitCallback() {
@Override
public void addToPrototypeCallback() {
addToPrototype(c, body.getStatements());
}
});
out("return ", names.name(c), "(new ", names.name(c), ".$$);");
endBlock();
out("())");
}
@Override
public void visit(AttributeArgument that) {
out("(function()");
beginBlock();
out("//AttributeArgument ", that.getParameter().getName());
location(that);
endLine();
Block block = that.getBlock();
SpecifierExpression specExpr = that.getSpecifierExpression();
if (specExpr != null) {
out("return ");
specExpr.getExpression().visit(this);
out(";");
}
else if (block != null) {
visitStatements(block.getStatements());
}
endBlock();
out("())");
}
@Override
public void visit(SequencedArgument that) {
List<PositionalArgument> positionalArguments = that.getPositionalArguments();
boolean spread = !positionalArguments.isEmpty()
&& !(positionalArguments.get(positionalArguments.size()-1) instanceof Tree.ListedArgument);
if (!spread) { out("["); }
boolean first=true;
for (PositionalArgument arg: positionalArguments) {
if (!first) out(",");
if (arg instanceof Tree.ListedArgument) {
((Tree.ListedArgument) arg).getExpression().visit(this);
} else if (arg instanceof Tree.SpreadArgument) {
((Tree.SpreadArgument) arg).getExpression().visit(this);
} else {
// comprehension
arg.visit(this);
}
first = false;
}
if (!spread) { out("]"); }
}
@Override
public void visit(SequenceEnumeration that) {
SequencedArgument sarg = that.getSequencedArgument();
if (sarg == null) {
out(clAlias, "getEmpty()");
} else {
List<PositionalArgument> positionalArguments = sarg.getPositionalArguments();
int lim = positionalArguments.size()-1;
boolean spread = !positionalArguments.isEmpty()
&& !(positionalArguments.get(positionalArguments.size()-1) instanceof Tree.ListedArgument);
int count=0;
ProducedType chainedType = null;
if (lim>0 || !spread) {
out("[");
}
for (PositionalArgument expr : positionalArguments) {
if (count==lim && spread) {
if (lim > 0) {
ProducedType seqType = TypeUtils.findSupertype(types.iterable, that.getTypeModel());
closeSequenceWithReifiedType(that, seqType.getTypeArguments());
out(".chain(");
chainedType = TypeUtils.findSupertype(types.iterable, expr.getTypeModel());
}
count--;
} else {
if (count > 0) {
out(",");
}
}
if (dynblock > 0 && expr instanceof ListedArgument && TypeUtils.isUnknown(expr.getTypeModel())) {
TypeUtils.generateDynamicCheck(((ListedArgument)expr).getExpression(), types.anything.getType(), this);
} else {
expr.visit(this);
}
count++;
}
if (chainedType == null) {
if (!spread) {
closeSequenceWithReifiedType(that, that.getTypeModel().getTypeArguments());
}
} else {
out(",");
TypeUtils.printTypeArguments(that, chainedType.getTypeArguments(), this);
out(")");
}
}
}
@Override
public void visit(Comprehension that) {
new ComprehensionGenerator(this, names, directAccess).generateComprehension(that);
}
@Override
public void visit(final SpecifierStatement that) {
// A lazy specifier expression in a class/interface should go into the
// prototype in prototype style, so don't generate them here.
if (!(prototypeStyle && (that.getSpecifierExpression() instanceof LazySpecifierExpression)
&& (that.getScope().getContainer() instanceof TypeDeclaration))) {
specifierStatement(null, that);
}
}
private void specifierStatement(final TypeDeclaration outer,
final SpecifierStatement specStmt) {
if (specStmt.getBaseMemberExpression() instanceof BaseMemberExpression) {
BaseMemberExpression bme = (BaseMemberExpression) specStmt.getBaseMemberExpression();
Declaration bmeDecl = bme.getDeclaration();
if (specStmt.getSpecifierExpression() instanceof LazySpecifierExpression) {
// attr => expr;
final boolean property = defineAsProperty(bmeDecl);
if (property) {
out(clAlias, "defineAttr(", qualifiedPath(specStmt, bmeDecl), ",'",
names.name(bmeDecl), "',function()");
} else {
if (bmeDecl.isMember()) {
qualify(specStmt, bmeDecl);
} else {
out ("var ");
}
out(names.getter(bmeDecl), "=function()");
}
beginBlock();
if (outer != null) { initSelf(specStmt.getScope()); }
out ("return ");
specStmt.getSpecifierExpression().visit(this);
out(";");
endBlock();
if (property) { out(")"); }
endLine(true);
directAccess.remove(bmeDecl);
}
else if (outer != null) {
// "attr = expr;" in a prototype definition
if (bmeDecl.isMember() && (bmeDecl instanceof Value) && bmeDecl.isActual()) {
out("delete ", names.self(outer), ".", names.name(bmeDecl));
endLine(true);
}
}
else if (bmeDecl instanceof MethodOrValue) {
// "attr = expr;" in an initializer or method
final MethodOrValue moval = (MethodOrValue)bmeDecl;
if (moval.isVariable()) {
// simple assignment to a variable attribute
generateMemberAccess(bme, new MemberAccessCallback() {
@Override public void generateValue() {
int boxType = boxUnboxStart(specStmt.getSpecifierExpression().getExpression().getTerm(),
moval);
if (dynblock > 0 && !TypeUtils.isUnknown(moval.getType())
&& TypeUtils.isUnknown(specStmt.getSpecifierExpression().getExpression().getTypeModel())) {
TypeUtils.generateDynamicCheck(specStmt.getSpecifierExpression().getExpression(),
moval.getType(), GenerateJsVisitor.this);
} else {
specStmt.getSpecifierExpression().getExpression().visit(GenerateJsVisitor.this);
}
boxUnboxEnd(boxType);
}
}, true);
out(";");
} else if (moval.isMember()) {
// Specifier for a member attribute. This actually defines the
// member (e.g. in shortcut refinement syntax the attribute
// declaration itself can be omitted), so generate the attribute.
generateAttributeGetter(moval,
specStmt.getSpecifierExpression(), null);
} else {
// Specifier for some other attribute, or for a method.
if (prototypeStyle
|| (bmeDecl.isMember() && (bmeDecl instanceof Method))) {
qualify(specStmt, bmeDecl);
}
out(names.name(bmeDecl), "=");
if (dynblock > 0 && TypeUtils.isUnknown(specStmt.getSpecifierExpression().getExpression().getTypeModel())) {
TypeUtils.generateDynamicCheck(specStmt.getSpecifierExpression().getExpression(),
bme.getTypeModel(), this);
} else {
specStmt.getSpecifierExpression().visit(this);
}
out(";");
}
}
}
else if ((specStmt.getBaseMemberExpression() instanceof ParameterizedExpression)
&& (specStmt.getSpecifierExpression() != null)) {
final ParameterizedExpression paramExpr =
(ParameterizedExpression) specStmt.getBaseMemberExpression();
if (paramExpr.getPrimary() instanceof BaseMemberExpression) {
// func(params) => expr;
BaseMemberExpression bme = (BaseMemberExpression) paramExpr.getPrimary();
Declaration bmeDecl = bme.getDeclaration();
if (bmeDecl.isMember()) {
qualify(specStmt, bmeDecl);
} else {
out("var ");
}
out(names.name(bmeDecl), "=");
singleExprFunction(paramExpr.getParameterLists(),
specStmt.getSpecifierExpression().getExpression(),
specStmt.getScope());
out(";");
}
}
}
private void addSpecifierToPrototype(final TypeDeclaration outer,
final SpecifierStatement specStmt) {
specifierStatement(outer, specStmt);
}
@Override
public void visit(final AssignOp that) {
String returnValue = null;
StaticMemberOrTypeExpression lhsExpr = null;
if (dynblock > 0 && TypeUtils.isUnknown(that.getLeftTerm().getTypeModel())) {
that.getLeftTerm().visit(this);
out("=");
that.getRightTerm().visit(this);
return;
}
out("(");
if (that.getLeftTerm() instanceof BaseMemberExpression) {
BaseMemberExpression bme = (BaseMemberExpression) that.getLeftTerm();
lhsExpr = bme;
Declaration bmeDecl = bme.getDeclaration();
boolean simpleSetter = hasSimpleGetterSetter(bmeDecl);
if (!simpleSetter) {
returnValue = memberAccess(bme);
}
} else if (that.getLeftTerm() instanceof QualifiedMemberExpression) {
QualifiedMemberExpression qme = (QualifiedMemberExpression)that.getLeftTerm();
lhsExpr = qme;
boolean simpleSetter = hasSimpleGetterSetter(qme.getDeclaration());
String lhsVar = null;
if (!simpleSetter) {
lhsVar = createRetainedTempVar();
out(lhsVar, "=");
super.visit(qme);
out(",", lhsVar, ".");
returnValue = lhsVar + "." + memberAccess(qme);
} else {
super.visit(qme);
out(".");
}
}
generateMemberAccess(lhsExpr, new MemberAccessCallback() {
@Override public void generateValue() {
int boxType = boxUnboxStart(that.getRightTerm(), that.getLeftTerm());
that.getRightTerm().visit(GenerateJsVisitor.this);
boxUnboxEnd(boxType);
}
}, true);
if (returnValue != null) { out(",", returnValue); }
out(")");
}
    /** Outputs the path used to qualify the specified declaration (module alias or outer instance), followed by a dot. Returns true if something was output. */
boolean qualify(Node that, Declaration d) {
if (d.getUnit().getPackage().getModule().isDefault()) {
return false;
}
String path = qualifiedPath(that, d);
if (path.length() > 0) {
out(path, ".");
}
return path.length() > 0;
}
private String qualifiedPath(Node that, Declaration d) {
return qualifiedPath(that, d, false);
}
private String qualifiedPath(Node that, Declaration d, boolean inProto) {
boolean isMember = d.isClassOrInterfaceMember();
if (!isMember && isImported(that, d)) {
return names.moduleAlias(d.getUnit().getPackage().getModule());
}
else if (prototypeStyle && !inProto) {
if (isMember && !(d instanceof com.redhat.ceylon.compiler.typechecker.model.Parameter
&& !d.isCaptured())) {
TypeDeclaration id = that.getScope().getInheritingDeclaration(d);
if (id == null) {
//a local declaration of some kind,
//perhaps in an outer scope
id = (TypeDeclaration) d.getContainer();
} //else {
//an inherited declaration that might be
//inherited by an outer scope
//}
String path = "";
Scope scope = that.getScope();
// if (inProto) {
// while ((scope != null) && (scope instanceof TypeDeclaration)) {
// scope = scope.getContainer();
// }
// }
if ((scope != null) && ((that instanceof ClassDeclaration)
|| (that instanceof InterfaceDeclaration))) {
                // class/interface aliases have no "this" of their own
scope = scope.getContainer();
}
while (scope != null) {
if (scope instanceof TypeDeclaration) {
if (path.length() > 0) {
path += '.';
}
path += names.self((TypeDeclaration) scope);
} else {
path = "";
}
if (scope == id) {
break;
}
scope = scope.getContainer();
}
return path;
}
}
else if (d != null && (d.isShared() || inProto) && isMember) {
TypeDeclaration id = that.getScope().getInheritingDeclaration(d);
if (id==null) {
//a shared local declaration
return names.self((TypeDeclaration)d.getContainer());
}
else {
//an inherited declaration that might be
//inherited by an outer scope
return names.self(id);
}
}
return "";
}
    /** Tells whether a declaration belongs to a different package than the given node, i.e. whether it has been imported. */
private boolean isImported(Node that, Declaration d) {
if (d == null) {
return false;
}
Package p1 = d.getUnit().getPackage();
Package p2 = that == null ? null : that.getUnit().getPackage();
return !p1.equals(p2);
}
@Override
public void visit(ExecutableStatement that) {
super.visit(that);
endLine(true);
}
/** Creates a new temporary variable which can be used immediately, even
* inside an expression. The declaration for that temporary variable will be
* emitted after the current Ceylon statement has been completely processed.
* The resulting code is valid because JavaScript variables may be used before
* they are declared. */
private String createRetainedTempVar(String baseName) {
String varName = names.createTempVariable(baseName);
retainedVars.add(varName);
return varName;
}
private String createRetainedTempVar() {
return createRetainedTempVar("tmp");
}
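    // Illustrative sketch (not part of the original source): the DefaultOp visitor below uses
    // this helper, so `a else b` comes out roughly as `(opt$1=a,opt$1!==null?opt$1:b)` while
    // the retained declaration `var opt$1;` is emitted after the statement; JS hoisting makes
    // the early use legal. Variable names are illustrative.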
// @Override
// public void visit(Expression that) {
// if (that.getTerm() instanceof QualifiedMemberOrTypeExpression) {
// QualifiedMemberOrTypeExpression term = (QualifiedMemberOrTypeExpression) that.getTerm();
// // References to methods of types from other packages always need
// // special treatment, even if prototypeStyle==false, because they
// // may have been generated in prototype style. In particular,
// // ceylon.language is always in prototype style.
// if ((term.getDeclaration() instanceof Functional)
// && (prototypeStyle || !declaredInThisPackage(term.getDeclaration()))) {
// if (term.getMemberOperator() instanceof SpreadOp) {
// generateSpread(term);
// } else {
// generateCallable(term, names.name(term.getDeclaration()));
// }
// return;
// }
// }
// super.visit(that);
// }
@Override
public void visit(Return that) {
out("return ");
super.visit(that);
}
@Override
public void visit(AnnotationList that) {}
void self(TypeDeclaration d) {
out(names.self(d));
}
/* * Output the name of a variable that receives the type parameter info, usually in the class constructor. * /
private void selfTypeParameters(TypeDeclaration d) {
out(selfTypeParametersString(d));
}
private String selfTypeParametersString(TypeDeclaration d) {
return "$$typeParms" + d.getName();
}*/
/*private void self() {
out("$$");
}*/
private boolean outerSelf(Declaration d) {
if (d.isToplevel()) {
out("exports");
return true;
}
else if (d.isClassOrInterfaceMember()) {
self((TypeDeclaration)d.getContainer());
return true;
}
return false;
}
private boolean declaredInCL(Declaration decl) {
return decl.getUnit().getPackage().getQualifiedNameString()
.startsWith("ceylon.language");
}
@Override
public void visit(SumOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
termgen.left();
out(".plus(");
termgen.right();
out(")");
}
});
}
@Override
public void visit(DifferenceOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
termgen.left();
out(".minus(");
termgen.right();
out(")");
}
});
}
@Override
public void visit(ProductOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
termgen.left();
out(".times(");
termgen.right();
out(")");
}
});
}
@Override
public void visit(QuotientOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
termgen.left();
out(".divided(");
termgen.right();
out(")");
}
});
}
@Override public void visit(RemainderOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
termgen.left();
out(".remainder(");
termgen.right();
out(")");
}
});
}
@Override public void visit(PowerOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
termgen.left();
out(".power(");
termgen.right();
out(")");
}
});
}
@Override public void visit(AddAssignOp that) {
arithmeticAssignOp(that, "plus");
}
@Override public void visit(SubtractAssignOp that) {
arithmeticAssignOp(that, "minus");
}
@Override public void visit(MultiplyAssignOp that) {
arithmeticAssignOp(that, "times");
}
@Override public void visit(DivideAssignOp that) {
arithmeticAssignOp(that, "divided");
}
@Override public void visit(RemainderAssignOp that) {
arithmeticAssignOp(that, "remainder");
}
private void arithmeticAssignOp(final ArithmeticAssignmentOp that,
final String functionName) {
Term lhs = that.getLeftTerm();
if (lhs instanceof BaseMemberExpression) {
BaseMemberExpression lhsBME = (BaseMemberExpression) lhs;
Declaration lhsDecl = lhsBME.getDeclaration();
final String getLHS = memberAccess(lhsBME);
out("(");
generateMemberAccess(lhsBME, new MemberAccessCallback() {
@Override public void generateValue() {
out(getLHS, ".", functionName, "(");
that.getRightTerm().visit(GenerateJsVisitor.this);
out(")");
}
}, true);
if (!hasSimpleGetterSetter(lhsDecl)) { out(",", getLHS); }
out(")");
} else if (lhs instanceof QualifiedMemberExpression) {
QualifiedMemberExpression lhsQME = (QualifiedMemberExpression) lhs;
if (isNative(lhsQME)) {
// ($1.foo = Box($1.foo).operator($2))
out("(");
lhsQME.getPrimary().visit(this);
out(".", lhsQME.getDeclaration().getName());
out("=");
int boxType = boxStart(lhsQME);
lhsQME.getPrimary().visit(this);
out(".", lhsQME.getDeclaration().getName());
boxUnboxEnd(boxType);
out(".", functionName, "(");
that.getRightTerm().visit(this);
out("))");
} else {
final String lhsPrimaryVar = createRetainedTempVar();
final String getLHS = lhsPrimaryVar + "." + memberAccess(lhsQME);
out("(", lhsPrimaryVar, "=");
lhsQME.getPrimary().visit(this);
out(",", lhsPrimaryVar, ".");
generateMemberAccess(lhsQME, new MemberAccessCallback() {
@Override public void generateValue() {
out(getLHS, ".", functionName, "(");
that.getRightTerm().visit(GenerateJsVisitor.this);
out(")");
}
}, false);
if (!hasSimpleGetterSetter(lhsQME.getDeclaration())) {
out(",", getLHS);
}
out(")");
}
}
}
@Override public void visit(final NegativeOp that) {
unaryOp(that, new UnaryOpGenerator() {
@Override
public void generate(UnaryOpTermGenerator termgen) {
TypeDeclaration d = that.getTerm().getTypeModel().getDeclaration();
if (d.inherits(types._integer)) {
out("(-");
termgen.term();
out(")");
//This is not really optimal yet, since it generates
//stuff like Float(-Float((5.1)))
/*} else if (d.inherits(types._float)) {
out(clAlias, "Float(-");
termgen.term();
out(")");*/
} else {
termgen.term();
out(".negativeValue");
}
}
});
}
@Override public void visit(final PositiveOp that) {
unaryOp(that, new UnaryOpGenerator() {
@Override
public void generate(UnaryOpTermGenerator termgen) {
TypeDeclaration d = that.getTerm().getTypeModel().getDeclaration();
if (d.inherits(types._integer) || d.inherits(types._float)) {
out("(+");
termgen.term();
out(")");
} else {
termgen.term();
out(".positiveValue");
}
}
});
}
@Override public void visit(EqualOp that) {
if (dynblock > 0 && TypeUtils.isUnknown(that.getLeftTerm().getTypeModel())) {
//Try to use equals() if it exists
String ltmp = names.createTempVariable();
String rtmp = names.createTempVariable();
out("(", ltmp, "=");
box(that.getLeftTerm());
out(",", rtmp, "=");
box(that.getRightTerm());
out(",(", ltmp, ".equals&&", ltmp, ".equals(", rtmp, "))||", ltmp, "===", rtmp, ")");
} else {
leftEqualsRight(that);
}
}
@Override public void visit(NotEqualOp that) {
if (dynblock > 0 && TypeUtils.isUnknown(that.getLeftTerm().getTypeModel())) {
//Try to use equals() if it exists
String ltmp = names.createTempVariable();
String rtmp = names.createTempVariable();
out("(", ltmp, "=");
box(that.getLeftTerm());
out(",", rtmp, "=");
box(that.getRightTerm());
out(",(", ltmp, ".equals&&!", ltmp, ".equals(", rtmp, "))||", ltmp, "!==", rtmp, ")");
} else {
out("(!");
leftEqualsRight(that);
out(")");
}
}
@Override public void visit(NotOp that) {
unaryOp(that, new UnaryOpGenerator() {
@Override
public void generate(UnaryOpTermGenerator termgen) {
out("(!");
termgen.term();
out(")");
}
});
}
@Override public void visit(IdenticalOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
out("(");
termgen.left();
out("===");
termgen.right();
out(")");
}
});
}
@Override public void visit(CompareOp that) {
leftCompareRight(that);
}
@Override public void visit(SmallerOp that) {
if (dynblock > 0 && TypeUtils.isUnknown(that.getLeftTerm().getTypeModel())) {
//Try to use compare() if it exists
String ltmp = names.createTempVariable();
String rtmp = names.createTempVariable();
out("(", ltmp, "=");
box(that.getLeftTerm());
out(",", rtmp, "=");
box(that.getRightTerm());
out(",(", ltmp, ".compare&&", ltmp, ".compare(", rtmp, ").equals(",
clAlias, "getSmaller()))||", ltmp, "<", rtmp, ")");
} else {
leftCompareRight(that);
out(".equals(", clAlias, "getSmaller())");
}
}
@Override public void visit(LargerOp that) {
if (dynblock > 0 && TypeUtils.isUnknown(that.getLeftTerm().getTypeModel())) {
//Try to use compare() if it exists
String ltmp = names.createTempVariable();
String rtmp = names.createTempVariable();
out("(", ltmp, "=");
box(that.getLeftTerm());
out(",", rtmp, "=");
box(that.getRightTerm());
out(",(", ltmp, ".compare&&", ltmp, ".compare(", rtmp, ").equals(",
clAlias, "getLarger()))||", ltmp, ">", rtmp, ")");
} else {
leftCompareRight(that);
out(".equals(", clAlias, "getLarger())");
}
}
@Override public void visit(SmallAsOp that) {
if (dynblock > 0 && TypeUtils.isUnknown(that.getLeftTerm().getTypeModel())) {
//Try to use compare() if it exists
String ltmp = names.createTempVariable();
String rtmp = names.createTempVariable();
out("(", ltmp, "=");
box(that.getLeftTerm());
out(",", rtmp, "=");
box(that.getRightTerm());
out(",(", ltmp, ".compare&&", ltmp, ".compare(", rtmp, "!==",
clAlias, "getLarger()))||", ltmp, "<=", rtmp, ")");
} else {
out("(");
leftCompareRight(that);
out("!==", clAlias, "getLarger()");
out(")");
}
}
@Override public void visit(LargeAsOp that) {
if (dynblock > 0 && TypeUtils.isUnknown(that.getLeftTerm().getTypeModel())) {
//Try to use compare() if it exists
String ltmp = names.createTempVariable();
String rtmp = names.createTempVariable();
out("(", ltmp, "=");
box(that.getLeftTerm());
out(",", rtmp, "=");
box(that.getRightTerm());
out(",(", ltmp, ".compare&&", ltmp, ".compare(", rtmp, "!==",
clAlias, "getSmaller()))||", ltmp, ">=", rtmp, ")");
} else {
out("(");
leftCompareRight(that);
out("!==", clAlias, "getSmaller()");
out(")");
}
}
    /** Outputs the JS equivalent of the Ceylon expression 'a==b'. */
private void leftEqualsRight(BinaryOperatorExpression that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
termgen.left();
out(".equals(");
termgen.right();
out(")");
}
});
}
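    // For example, `a == b` is emitted roughly as `a.equals(b)` (operands boxed where needed).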
interface UnaryOpTermGenerator {
void term();
}
interface UnaryOpGenerator {
void generate(UnaryOpTermGenerator termgen);
}
private void unaryOp(final UnaryOperatorExpression that, final UnaryOpGenerator gen) {
final GenerateJsVisitor visitor = this;
gen.generate(new UnaryOpTermGenerator() {
@Override
public void term() {
int boxTypeLeft = boxStart(that.getTerm());
that.getTerm().visit(visitor);
boxUnboxEnd(boxTypeLeft);
}
});
}
interface BinaryOpTermGenerator {
void left();
void right();
}
interface BinaryOpGenerator {
void generate(BinaryOpTermGenerator termgen);
}
private void binaryOp(final BinaryOperatorExpression that, final BinaryOpGenerator gen) {
gen.generate(new BinaryOpTermGenerator() {
@Override
public void left() {
box(that.getLeftTerm());
}
@Override
public void right() {
box(that.getRightTerm());
}
});
}
    /** Outputs the JS equivalent of the Ceylon expression 'a <=> b'. */
private void leftCompareRight(BinaryOperatorExpression that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
termgen.left();
out(".compare(");
termgen.right();
out(")");
}
});
}
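    // For example, `a <=> b` is emitted roughly as `a.compare(b)`; the comparison visitors
    // defined earlier test that result against getSmaller()/getLarger().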
@Override public void visit(AndOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
out("(");
termgen.left();
out("&&");
termgen.right();
out(")");
}
});
}
@Override public void visit(OrOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
out("(");
termgen.left();
out("||");
termgen.right();
out(")");
}
});
}
@Override public void visit(final EntryOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
out(clAlias, "Entry(");
termgen.left();
out(",");
termgen.right();
out(",");
TypeUtils.printTypeArguments(that, that.getTypeModel().getTypeArguments(),
GenerateJsVisitor.this);
out(")");
}
});
}
@Override public void visit(Element that) {
out(".get(");
that.getExpression().visit(this);
out(")");
}
@Override public void visit(DefaultOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
String lhsVar = createRetainedTempVar("opt");
out("(", lhsVar, "=");
termgen.left();
out(",", lhsVar, "!==null?", lhsVar, ":");
termgen.right();
out(")");
}
});
}
@Override public void visit(ThenOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
out("(");
termgen.left();
out("?");
termgen.right();
out(":null)");
}
});
}
@Override public void visit(IncrementOp that) {
prefixIncrementOrDecrement(that.getTerm(), "successor");
}
@Override public void visit(DecrementOp that) {
prefixIncrementOrDecrement(that.getTerm(), "predecessor");
}
private boolean hasSimpleGetterSetter(Declaration decl) {
return (dynblock > 0 && TypeUtils.isUnknown(decl)) ||
!((decl instanceof Getter) || (decl instanceof Setter) || decl.isFormal());
}
private void prefixIncrementOrDecrement(Term term, String functionName) {
if (term instanceof BaseMemberExpression) {
BaseMemberExpression bme = (BaseMemberExpression) term;
boolean simpleSetter = hasSimpleGetterSetter(bme.getDeclaration());
String getMember = memberAccess(bme);
String applyFunc = String.format("%s.%s", getMember, functionName);
out("(");
generateMemberAccess(bme, applyFunc, true);
if (!simpleSetter) { out(",", getMember); }
out(")");
} else if (term instanceof QualifiedMemberExpression) {
QualifiedMemberExpression qme = (QualifiedMemberExpression) term;
String primaryVar = createRetainedTempVar();
String getMember = primaryVar + "." + memberAccess(qme);
String applyFunc = String.format("%s.%s", getMember, functionName);
out("(", primaryVar, "=");
qme.getPrimary().visit(this);
out(",", primaryVar, ".");
generateMemberAccess(qme, applyFunc, false);
if (!hasSimpleGetterSetter(qme.getDeclaration())) {
out(",", getMember);
}
out(")");
}
}
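    // Illustrative: for a simple variable `n`, `++n` comes out roughly as `(n=n.successor)`;
    // when the attribute has a real getter/setter, the current value is read again and
    // appended so the whole expression still yields the updated value.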
@Override public void visit(PostfixIncrementOp that) {
postfixIncrementOrDecrement(that.getTerm(), "successor");
}
@Override public void visit(PostfixDecrementOp that) {
postfixIncrementOrDecrement(that.getTerm(), "predecessor");
}
private void postfixIncrementOrDecrement(Term term, String functionName) {
if (term instanceof BaseMemberExpression) {
BaseMemberExpression bme = (BaseMemberExpression) term;
if (bme.getDeclaration() == null && dynblock > 0) {
out(bme.getIdentifier().getText(), "successor".equals(functionName) ? "++" : "--");
return;
}
String oldValueVar = createRetainedTempVar("old" + bme.getDeclaration().getName());
String applyFunc = String.format("%s.%s", oldValueVar, functionName);
out("(", oldValueVar, "=", memberAccess(bme), ",");
generateMemberAccess(bme, applyFunc, true);
out(",", oldValueVar, ")");
} else if (term instanceof QualifiedMemberExpression) {
QualifiedMemberExpression qme = (QualifiedMemberExpression) term;
if (qme.getDeclaration() == null && dynblock > 0) {
out(qme.getIdentifier().getText(), "successor".equals(functionName) ? "++" : "--");
return;
}
String primaryVar = createRetainedTempVar();
String oldValueVar = createRetainedTempVar("old" + qme.getDeclaration().getName());
String applyFunc = String.format("%s.%s", oldValueVar, functionName);
out("(", primaryVar, "=");
qme.getPrimary().visit(this);
out(",", oldValueVar, "=", primaryVar, ".", memberAccess(qme), ",",
primaryVar, ".");
generateMemberAccess(qme, applyFunc, false);
out(",", oldValueVar, ")");
}
}
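    // Illustrative: `n++` comes out roughly as `(old$1=n,n=old$1.successor,old$1)`, so the
    // expression evaluates to the previous value (temporary variable names are illustrative).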
@Override
public void visit(final UnionOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
termgen.left();
out(".union(");
termgen.right();
out(",");
TypeUtils.printTypeArguments(that, that.getRightTerm().getTypeModel().getTypeArguments(),
GenerateJsVisitor.this);
out(")");
}
});
}
@Override
public void visit(final IntersectionOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
termgen.left();
out(".intersection(");
termgen.right();
out(",");
TypeUtils.printTypeArguments(that, that.getRightTerm().getTypeModel().getTypeArguments(),
GenerateJsVisitor.this);
out(")");
}
});
}
@Override
public void visit(final XorOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
termgen.left();
out(".exclusiveUnion(");
termgen.right();
out(",");
TypeUtils.printTypeArguments(that, that.getRightTerm().getTypeModel().getTypeArguments(),
GenerateJsVisitor.this);
out(")");
}
});
}
@Override
public void visit(final ComplementOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
termgen.left();
out(".complement(");
termgen.right();
out(",");
TypeUtils.printTypeArguments(that, that.getRightTerm().getTypeModel().getTypeArguments(),
GenerateJsVisitor.this);
out(")");
}
});
}
@Override public void visit(Exists that) {
unaryOp(that, new UnaryOpGenerator() {
@Override
public void generate(UnaryOpTermGenerator termgen) {
out(clAlias, "exists(");
termgen.term();
out(")");
}
});
}
@Override public void visit(Nonempty that) {
unaryOp(that, new UnaryOpGenerator() {
@Override
public void generate(UnaryOpTermGenerator termgen) {
out(clAlias, "nonempty(");
termgen.term();
out(")");
}
});
}
//Don't know if we'll ever see this...
@Override public void visit(ConditionList that) {
System.out.println("ZOMG condition list in the wild! " + that.getLocation() + " of " + that.getUnit().getFilename());
super.visit(that);
}
@Override public void visit(BooleanCondition that) {
int boxType = boxStart(that.getExpression().getTerm());
super.visit(that);
boxUnboxEnd(boxType);
}
@Override public void visit(IfStatement that) {
conds.generateIf(that);
}
@Override public void visit(WhileStatement that) {
conds.generateWhile(that);
}
/** Generates js code to check if a term is of a certain type. We solve this in JS by
* checking against all types that Type satisfies (in the case of union types, matching any
* type will do, and in case of intersection types, all types must be matched).
* @param term The term that is to be checked against a type
* @param termString (optional) a string to be used as the term to be checked
* @param type The type to check against
* @param tmpvar (optional) a variable to which the term is assigned
* @param negate If true, negates the generated condition
*/
void generateIsOfType(Node term, String termString, Type type, String tmpvar, final boolean negate) {
if (negate) {
out("!");
}
out(clAlias, "isOfType(");
if (term instanceof Term) {
conds.specialConditionRHS((Term)term, tmpvar);
} else {
conds.specialConditionRHS(termString, tmpvar);
}
out(",");
TypeUtils.typeNameOrList(term, type.getTypeModel(), this, true);
out(")");
}
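    // Illustrative: `term is Foo` is emitted roughly as `isOfType(term,<type descriptor>)`,
    // prefixed with the ceylon.language module alias; the descriptor itself is produced by
    // TypeUtils.typeNameOrList and is not spelled out here.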
@Override
public void visit(IsOp that) {
generateIsOfType(that.getTerm(), null, that.getType(), null, false);
}
@Override public void visit(Break that) {
if (continues.isEmpty()) {
out("break;");
} else {
Continuation top=continues.peek();
if (that.getScope()==top.getScope()) {
top.useBreak();
out(top.getBreakName(), "=true; return;");
} else {
out("break;");
}
}
}
@Override public void visit(Continue that) {
if (continues.isEmpty()) {
out("continue;");
} else {
Continuation top=continues.peek();
if (that.getScope()==top.getScope()) {
top.useContinue();
out(top.getContinueName(), "=true; return;");
} else {
out("continue;");
}
}
}
@Override public void visit(final RangeOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
out(clAlias, "Range(");
termgen.left();
out(",");
termgen.right();
out(",");
TypeUtils.printTypeArguments(that,
that.getTypeModel().getTypeArguments(),
GenerateJsVisitor.this);
out(")");
}
});
}
@Override public void visit(ForStatement that) {
if (comment) {
out("//'for' statement at ", that.getUnit().getFilename(), " (", that.getLocation(), ")");
if (that.getExits()) out("//EXITS!");
endLine();
}
ForIterator foriter = that.getForClause().getForIterator();
final String itemVar = generateForLoop(foriter);
boolean hasElse = that.getElseClause() != null && !that.getElseClause().getBlock().getStatements().isEmpty();
visitStatements(that.getForClause().getBlock().getStatements());
//If there's an else block, check for normal termination
endBlock();
if (hasElse) {
endLine();
out("if (", clAlias, "getFinished() === ", itemVar, ")");
encloseBlockInFunction(that.getElseClause().getBlock());
}
}
/** Generates code for the beginning of a "for" loop, returning the name of the variable used for the item. */
private String generateForLoop(ForIterator that) {
SpecifierExpression iterable = that.getSpecifierExpression();
final String iterVar = names.createTempVariable("it");
final String itemVar;
if (that instanceof ValueIterator) {
itemVar = names.name(((ValueIterator)that).getVariable().getDeclarationModel());
} else {
itemVar = names.createTempVariable("item");
}
out("var ", iterVar, " = ");
iterable.visit(this);
out(".iterator;");
endLine();
out("var ", itemVar, ";while ((", itemVar, "=", iterVar, ".next())!==", clAlias, "getFinished())");
beginBlock();
if (that instanceof ValueIterator) {
directAccess.add(((ValueIterator)that).getVariable().getDeclarationModel());
} else if (that instanceof KeyValueIterator) {
String keyvar = names.name(((KeyValueIterator)that).getKeyVariable().getDeclarationModel());
String valvar = names.name(((KeyValueIterator)that).getValueVariable().getDeclarationModel());
out("var ", keyvar, "=", itemVar, ".key;");
endLine();
out("var ", valvar, "=", itemVar, ".item;");
directAccess.add(((KeyValueIterator)that).getKeyVariable().getDeclarationModel());
directAccess.add(((KeyValueIterator)that).getValueVariable().getDeclarationModel());
endLine();
}
return itemVar;
}
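    // Illustrative sketch of the generated loop for `for (x in xs) { ... }`:
    //   var it$1=xs.iterator;var x;while((x=it$1.next())!==getFinished()){ ... }
    // (getFinished() is actually prefixed with the ceylon.language module alias, and
    // key/item unpacking is added for `for (k->v in ...)` loops; names are illustrative).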
public void visit(InOp that) {
binaryOp(that, new BinaryOpGenerator() {
@Override
public void generate(BinaryOpTermGenerator termgen) {
termgen.right();
out(".contains(");
termgen.left();
out(")");
}
});
}
@Override public void visit(TryCatchStatement that) {
out("try");
encloseBlockInFunction(that.getTryClause().getBlock());
if (!that.getCatchClauses().isEmpty()) {
String catchVarName = names.createTempVariable("ex");
out("catch(", catchVarName, ")");
beginBlock();
boolean firstCatch = true;
for (CatchClause catchClause : that.getCatchClauses()) {
Variable variable = catchClause.getCatchVariable().getVariable();
if (!firstCatch) {
out("else ");
}
firstCatch = false;
out("if(");
generateIsOfType(variable, catchVarName, variable.getType(), null, false);
out(")");
if (catchClause.getBlock().getStatements().isEmpty()) {
out("{}");
} else {
beginBlock();
directAccess.add(variable.getDeclarationModel());
names.forceName(variable.getDeclarationModel(), catchVarName);
visitStatements(catchClause.getBlock().getStatements());
endBlockNewLine();
}
}
out("else{throw ", catchVarName, "}");
endBlockNewLine();
}
if (that.getFinallyClause() != null) {
out("finally");
encloseBlockInFunction(that.getFinallyClause().getBlock());
}
}
@Override public void visit(Throw that) {
out("throw ");
if (that.getExpression() != null) {
that.getExpression().visit(this);
} else {
out(clAlias, "Exception()");
}
out(";");
}
private void visitIndex(IndexExpression that) {
that.getPrimary().visit(this);
ElementOrRange eor = that.getElementOrRange();
if (eor instanceof Element) {
if (TypeUtils.isUnknown(that.getPrimary().getTypeModel()) && dynblock > 0) {
out("[");
((Element)eor).getExpression().visit(this);
out("]");
} else {
out(".get(");
((Element)eor).getExpression().visit(this);
out(")");
}
} else {//range, or spread?
ElementRange er = (ElementRange)eor;
Expression sexpr = er.getLength();
if (sexpr == null) {
if (er.getLowerBound() == null) {
out(".spanTo(");
} else if (er.getUpperBound() == null) {
out(".spanFrom(");
} else {
out(".span(");
}
} else {
out(".segment(");
}
if (er.getLowerBound() != null) {
er.getLowerBound().visit(this);
if (er.getUpperBound() != null || sexpr != null) {
out(",");
}
}
if (er.getUpperBound() != null) {
er.getUpperBound().visit(this);
} else if (sexpr != null) {
sexpr.visit(this);
}
out(")");
}
}
public void visit(IndexExpression that) {
visitIndex(that);
}
/** Generates code for a case clause, as part of a switch statement. Each case
* is rendered as an if. */
private void caseClause(CaseClause cc, String expvar, Term switchTerm) {
out("if (");
final CaseItem item = cc.getCaseItem();
if (item instanceof IsCase) {
IsCase isCaseItem = (IsCase) item;
generateIsOfType(switchTerm, expvar, isCaseItem.getType(), null, false);
Variable caseVar = isCaseItem.getVariable();
if (caseVar != null) {
directAccess.add(caseVar.getDeclarationModel());
names.forceName(caseVar.getDeclarationModel(), expvar);
}
} else if (item instanceof SatisfiesCase) {
item.addError("case(satisfies) not yet supported");
out("true");
} else if (item instanceof MatchCase){
boolean first = true;
for (Expression exp : ((MatchCase)item).getExpressionList().getExpressions()) {
if (!first) out(" || ");
out(expvar, "==="); //TODO equality?
/*out(".equals(");*/
exp.visit(this);
//out(")==="); clAlias(); out("getTrue()");
first = false;
}
} else {
cc.addUnexpectedError("support for case of type " + cc.getClass().getSimpleName() + " not yet implemented");
}
out(") ");
encloseBlockInFunction(cc.getBlock());
}
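    // Illustrative: a match case such as `case (x1|x2) { ... }` is rendered roughly as
    //   if (case$1===<x1> || case$1===<x2>) { ... }
    // while an `is` case delegates to generateIsOfType above (temp variable name illustrative).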
@Override
public void visit(SwitchStatement that) {
if (comment) out("//Switch statement at ", that.getUnit().getFilename(), " (", that.getLocation(), ")");
endLine();
//Put the expression in a tmp var
final String expvar = names.createTempVariable("case");
out("var ", expvar, "=");
Expression expr = that.getSwitchClause().getExpression();
expr.visit(this);
endLine(true);
//For each case, do an if
boolean first = true;
for (CaseClause cc : that.getSwitchCaseList().getCaseClauses()) {
if (!first) out("else ");
caseClause(cc, expvar, expr.getTerm());
first = false;
}
if (that.getSwitchCaseList().getElseClause() != null) {
out("else ");
that.getSwitchCaseList().getElseClause().visit(this);
}
if (comment) {
out("//End switch statement at ", that.getUnit().getFilename(), " (", that.getLocation(), ")");
endLine();
}
}
/** Generates the code for an anonymous function defined inside an argument list. */
@Override
public void visit(final FunctionArgument that) {
singleExprFunction(that.getParameterLists(), that.getExpression(), that.getScope());
}
private void singleExprFunction(final List<ParameterList> paramLists,
final Expression expr, final Scope scope) {
generateParameterLists(paramLists, scope, new ParameterListCallback() {
@Override
public void completeFunction() {
beginBlock();
if (paramLists.size() == 1) { initSelf(scope); }
initParameters(paramLists.get(paramLists.size()-1), null);
out("return ");
expr.visit(GenerateJsVisitor.this);
out(";");
endBlock();
}
});
}
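    // Illustrative: an anonymous function such as `(x) => x + 1` comes out roughly as
    //   function(x){return x.plus(1);}
    // (parameter renaming, self-initialization and default arguments are omitted in this sketch).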
/** Generates the code for a function in a named argument list. */
@Override
public void visit(final MethodArgument that) {
generateParameterLists(that.getParameterLists(), that.getScope(),
new ParameterListCallback() {
@Override
public void completeFunction() {
Block block = that.getBlock();
SpecifierExpression specExpr = that.getSpecifierExpression();
if (specExpr != null) {
out("{return ");
specExpr.getExpression().visit(GenerateJsVisitor.this);
out(";}");
}
else if (block != null) {
block.visit(GenerateJsVisitor.this);
}
}
});
}
@Override
public void visit(SegmentOp that) {
String rhs = names.createTempVariable();
out("(function(){var ", rhs, "=");
that.getRightTerm().visit(this);
endLine(true);
out("if (", rhs, ">0){");
endLine();
String lhs = names.createTempVariable();
String end = names.createTempVariable();
out("var ", lhs, "=");
that.getLeftTerm().visit(this);
endLine(true);
out("var ", end, "=", lhs);
endLine(true);
out("for (var i=1; i<", rhs, "; i++){", end, "=", end, ".successor;}");
endLine();
out("return ", clAlias, "Range(");
out(lhs, ",", end, ")");
endLine();
out("}else return ", clAlias, "getEmpty();}())");
}
/** Generates the code for single or multiple parameter lists, with a callback function to generate the function blocks. */
private void generateParameterLists(List<ParameterList> plist, Scope scope,
ParameterListCallback callback) {
if (plist.size() == 1) {
out(function);
ParameterList paramList = plist.get(0);
paramList.visit(this);
callback.completeFunction();
} else {
int count=0;
for (ParameterList paramList : plist) {
if (count==0) {
out(function);
} else {
out("return function");
}
paramList.visit(this);
if (count == 0) {
beginBlock();
initSelf(scope);
initParameters(paramList, null);
}
else {
out("{");
}
count++;
}
callback.completeFunction();
for (int i=0; i < count; i++) {
endBlock(false, i==count-1);
}
}
}
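    // Illustrative: for a declaration with several parameter lists, e.g. `f(a)(b)`, the output
    // is curried, roughly `function(a){ ... return function(b){ ... }}`, with one nested
    // function per additional parameter list (sketch only).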
/** Encloses the block in a function, IF NEEDED. */
void encloseBlockInFunction(Block block) {
boolean wrap=encloser.encloseBlock(block);
if (wrap) {
beginBlock();
Continuation c = new Continuation(block.getScope(), names);
continues.push(c);
out("var ", c.getContinueName(), "=false;"); endLine();
out("var ", c.getBreakName(), "=false;"); endLine();
out("var ", c.getReturnName(), "=(function()");
}
block.visit(this);
if (wrap) {
Continuation c = continues.pop();
out("());if(", c.getReturnName(), "!==undefined){return ", c.getReturnName(), ";}");
if (c.isContinued()) {
out("else if(", c.getContinueName(),"===true){continue;}");
}
if (c.isBreaked()) {
out("else if (", c.getBreakName(),"===true){break;}");
}
endBlockNewLine();
}
}
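    // Illustrative sketch of the wrapping (only produced when the encloser decides it is needed):
    //   { var cnt$1=false;var brk$1=false;var ret$1=(function(){ ...block... }());
    //     if(ret$1!==undefined){return ret$1;}else if(cnt$1===true){continue;}... }
    // so that `return`, `continue` and `break` inside the wrapped block reach the right target
    // (variable names are illustrative).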
private static class Continuation {
private final String cvar;
private final String rvar;
private final String bvar;
private final Scope scope;
private boolean cused, bused;
public Continuation(Scope scope, JsIdentifierNames names) {
this.scope=scope;
cvar = names.createTempVariable("cntvar");
rvar = names.createTempVariable("retvar");
bvar = names.createTempVariable("brkvar");
}
public Scope getScope() { return scope; }
public String getContinueName() { return cvar; }
public String getBreakName() { return bvar; }
public String getReturnName() { return rvar; }
public void useContinue() { cused = true; }
public void useBreak() { bused=true; }
public boolean isContinued() { return cused; }
public boolean isBreaked() { return bused; } //"isBroken" sounds really really bad in this case
}
private static interface ParameterListCallback {
void completeFunction();
}
    /** This interface is used inside the type initialization method. */
private interface PrototypeInitCallback {
void addToPrototypeCallback();
}
@Override
public void visit(Tuple that) {
int count = 0;
SequencedArgument sarg = that.getSequencedArgument();
if (sarg == null) {
out(clAlias, "getEmpty()");
} else {
List<Map<TypeParameter,ProducedType>> targs = new ArrayList<Map<TypeParameter,ProducedType>>();
List<PositionalArgument> positionalArguments = sarg.getPositionalArguments();
boolean spread = !positionalArguments.isEmpty()
&& positionalArguments.get(positionalArguments.size()-1) instanceof Tree.ListedArgument == false;
int lim = positionalArguments.size()-1;
for (PositionalArgument expr : positionalArguments) {
if (count > 0) {
out(",");
}
ProducedType exprType = expr.getTypeModel();
if (count==lim && spread) {
if (exprType.getDeclaration().inherits(types.tuple)) {
expr.visit(this);
} else {
expr.visit(this);
out(".sequence");
}
} else {
out(clAlias, "Tuple(");
if (count > 0) {
for (Map.Entry<TypeParameter,ProducedType> e : targs.get(0).entrySet()) {
if (e.getKey().getName().equals("Rest")) {
targs.add(0, e.getValue().getTypeArguments());
}
}
} else {
targs.add(that.getTypeModel().getTypeArguments());
}
if (dynblock > 0 && TypeUtils.isUnknown(exprType) && expr instanceof ListedArgument) {
exprType = types.anything.getType();
TypeUtils.generateDynamicCheck(((ListedArgument)expr).getExpression(), exprType, this);
} else {
expr.visit(this);
}
}
count++;
}
if (!spread) {
if (count > 0) {
out(",");
}
out(clAlias, "getEmpty()");
} else {
count--;
}
for (Map<TypeParameter,ProducedType> t : targs) {
out(",");
TypeUtils.printTypeArguments(that, t, this);
out(")");
}
}
}
@Override
public void visit(Assertion that) {
out("//assert");
location(that);
String custom = "Assertion failed";
//Scan for a "doc" annotation with custom message
for (Annotation ann : that.getAnnotationList().getAnnotations()) {
BaseMemberExpression bme = (BaseMemberExpression)ann.getPrimary();
if ("doc".equals(bme.getDeclaration().getName())) {
custom = ((Tree.ListedArgument)ann.getPositionalArgumentList().getPositionalArguments().get(0)).getExpression().getTerm().getText();
}
}
endLine();
StringBuilder sb = new StringBuilder(custom).append(": '");
for (int i = that.getConditionList().getToken().getTokenIndex()+1;
i < that.getConditionList().getEndToken().getTokenIndex(); i++) {
sb.append(tokens.get(i).getText());
}
sb.append("' at ").append(that.getUnit().getFilename()).append(" (").append(
that.getConditionList().getLocation()).append(")");
conds.specialConditionsAndBlock(that.getConditionList(), null, "if (!");
//escape
custom = escapeStringLiteral(sb.toString());
out(") { throw ", clAlias, "AssertionException('", custom, "'); }");
endLine();
}
@Override
public void visit(Tree.DynamicClause that) {
dynblock++;
out("/*Begin dynamic block*/");
super.visit(that);
out("/*End dynamic block*/");
dynblock--;
}
/** Closes a native array and invokes reifyCeylonType with the specified type parameters. */
void closeSequenceWithReifiedType(Node that, Map<TypeParameter,ProducedType> types) {
out("].reifyCeylonType(");
TypeUtils.printTypeArguments(that, types, this);
out(")");
}
boolean isInDynamicBlock() {
return dynblock > 0;
}
}
| fix super member access for attributes | src/main/java/com/redhat/ceylon/compiler/js/GenerateJsVisitor.java | fix super member access for attributes | <ide><path>src/main/java/com/redhat/ceylon/compiler/js/GenerateJsVisitor.java
<ide> package com.redhat.ceylon.compiler.js;
<ide>
<ide> import java.io.IOException;
<add>import java.io.StringWriter;
<ide> import java.io.Writer;
<ide> import java.util.ArrayList;
<ide> import java.util.HashMap;
<ide> private List<? extends Statement> currentStatements = null;
<ide>
<ide> private final TypeUtils types;
<del> private final Writer out;
<add> private Writer out;
<ide> final boolean prototypeStyle;
<ide> private CompilationUnit root;
<ide> private static String clAlias="";
<ide> /** Returns the helper component to handle naming. */
<ide> JsIdentifierNames getNames() { return names; }
<ide>
<add> private static interface GenerateCallback {
<add> public void generateValue();
<add> }
<add>
<ide> /** Print generated code to the Writer specified at creation time.
<ide> * Automatically prints indentation first if necessary.
<ide> * @param code The main code
<ide> out(" at ", node.getUnit().getFilename(), " (", node.getLocation(), ")");
<ide> }
<ide>
<add> private String generateToString(final GenerateCallback callback) {
<add> final Writer oldWriter = out;
<add> out = new StringWriter();
<add> callback.generateValue();
<add> final String str = out.toString();
<add> out = oldWriter;
<add> return str;
<add> }
<add>
<ide> @Override
<ide> public void visit(CompilationUnit that) {
<ide> root = that;
<ide> PositionalArgumentList argList = extendedType.getInvocationExpression()
<ide> .getPositionalArgumentList();
<ide> TypeDeclaration typeDecl = extendedType.getType().getDeclarationModel();
<del> qualify(that, typeDecl);
<del> out(memberAccessBase(extendedType.getType(), typeDecl, false, false),
<add> out(memberAccessBase(extendedType.getType(), typeDecl, false, qualifiedPath(that, typeDecl)),
<ide> (prototypeStyle && (getSuperMemberScope(extendedType.getType()) != null))
<ide> ? ".call(this," : "(");
<ide>
<ide> }
<ide> boolean inProto = prototypeStyle
<ide> && (type.getScope().getContainer() instanceof TypeDeclaration);
<del> String constr = qualifiedPath(type, d, inProto);
<del> if (constr.length() > 0) {
<del> constr += '.';
<del> }
<del> constr += memberAccessBase(type, d, false, false);
<del> return constr;
<add> return memberAccessBase(type, d, false, qualifiedPath(type, d, inProto));
<ide> }
<ide>
<ide> private void addToPrototype(ClassOrInterface d, List<Statement> statements) {
<ide> }
<ide> }
<ide> }
<del> out(memberAccess(that));
<add> out(memberAccess(that, null));
<ide> }
<ide>
<ide> private boolean accessDirectly(Declaration d) {
<ide> if (isMethod) {
<ide> out(clAlias, "JsCallable(", lhsVar, ",");
<ide> }
<del> out(lhsVar, "!==null?", lhsVar, ".", memberAccess(that), ":null)");
<add> out(lhsVar, "!==null?", memberAccess(that, lhsVar), ":null)");
<ide> if (isMethod) {
<ide> out(")");
<ide> }
<ide> }
<ide>
<ide> @Override
<del> public void visit(QualifiedMemberExpression that) {
<add> public void visit(final QualifiedMemberExpression that) {
<ide> //Big TODO: make sure the member is actually
<ide> // refined by the current class!
<ide> if (that.getMemberOperator() instanceof SafeMemberOp) {
<ide> //looks like checking for signature is a good way (not THE way though; named arg calls don't have signature)
<ide> generateCallable(that, null);
<ide> } else {
<del> super.visit(that);
<del> out(".", memberAccess(that));
<add> final String lhs = generateToString(new GenerateCallback() {
<add> @Override public void generateValue() {
<add> GenerateJsVisitor.super.visit(that);
<add> }
<add> });
<add> out(memberAccess(that, lhs));
<ide> }
<ide> }
<ide>
<ide> //Add value or reference to the array
<ide> out(tmplist, ".push(");
<ide> if (isMethod) {
<del> out("{o:", elem, ", f:", elem, ".", memberAccess(that), "}");
<add> out("{o:", elem, ", f:", memberAccess(that, elem), "}");
<ide> } else {
<del> out(elem, ".", memberAccess(that));
<add> out(memberAccess(that, elem));
<ide> }
<ide> out(");");
<ide> endBlockNewLine();
<ide> out("(", primaryVar, "=");
<ide> that.getPrimary().visit(this);
<ide> out(",", clAlias, "JsCallable(", primaryVar, ",", primaryVar, "!==null?",
<del> primaryVar, ".", (name == null) ? memberAccess(that) : name, ":null))");
<add> (name == null) ? memberAccess(that, primaryVar) : (primaryVar+"."+name), ":null))");
<ide> }
<ide>
<ide> /**
<ide> }
<ide>
<ide> private String memberAccessBase(Node node, Declaration decl, boolean setter,
<del> boolean qualifyBaseExpr) {
<add> String lhs) {
<ide> StringBuilder sb = new StringBuilder();
<ide>
<del> if (qualifyBaseExpr && (node instanceof BaseMemberOrTypeExpression)) {
<add> if (lhs != null) {
<add> if (lhs.length() > 0) {
<add> sb.append(lhs).append(".");
<add> }
<add> }
<add> else if (node instanceof BaseMemberOrTypeExpression) {
<ide> BaseMemberOrTypeExpression bmte = (BaseMemberOrTypeExpression) node;
<ide> String path = qualifiedPath(node, bmte.getDeclaration());
<ide> if (path.length() > 0) {
<ide> * then the LHS is *not* included. If it is a BaseMemberOrTypeExpression and
<ide> * qualifyBaseExpr==true then the qualified path is included.
<ide> */
<del> private String memberAccess(StaticMemberOrTypeExpression expr, boolean qualifyBaseExpr) {
<add> private String memberAccess(StaticMemberOrTypeExpression expr, String lhs) {
<ide> Declaration decl = expr.getDeclaration();
<add> String plainName = null;
<ide> if (decl == null && dynblock > 0) {
<del> return expr.getIdentifier().getText();
<del> }
<del> if (isNative(decl)) {
<add> plainName = expr.getIdentifier().getText();
<add> }
<add> else if (isNative(decl)) {
<ide> // direct access to a native element
<del> return decl.getName();
<add> plainName = decl.getName();
<add> }
<add> if (plainName != null) {
<add> return ((lhs != null) && (lhs.length() > 0))
<add> ? (lhs + "." + plainName) : plainName;
<ide> }
<ide> boolean protoCall = prototypeStyle && (getSuperMemberScope(expr) != null);
<ide> if (accessDirectly(decl) && !(protoCall && defineAsProperty(decl))) {
<ide> // direct access, without getter
<del> return memberAccessBase(expr, decl, false, qualifyBaseExpr);
<add> return memberAccessBase(expr, decl, false, lhs);
<ide> }
<ide> // access through getter
<del> return memberAccessBase(expr, decl, false, qualifyBaseExpr)
<add> return memberAccessBase(expr, decl, false, lhs)
<ide> + (protoCall ? ".call(this)" : "()");
<del> }
<del> private String memberAccess(StaticMemberOrTypeExpression expr) {
<del> return memberAccess(expr, true);
<del> }
<del>
<del> private static interface MemberAccessCallback {
<del> public void generateValue();
<ide> }
<ide>
<ide> /**
<ide> * qualifyBaseExpr==true then the qualified path is included.
<ide> */
<ide> private void generateMemberAccess(StaticMemberOrTypeExpression expr,
<del> MemberAccessCallback callback, boolean qualifyBaseExpr) {
<add> GenerateCallback callback, String lhs) {
<ide> Declaration decl = expr.getDeclaration();
<ide> boolean paren = false;
<add> String plainName = null;
<ide> if (decl == null && dynblock > 0) {
<del> out(expr.getIdentifier().getText(), "=");
<add> plainName = expr.getIdentifier().getText();
<ide> } else if (isNative(decl)) {
<ide> // direct access to a native element
<del> out(decl.getName(), "=");
<add> plainName = decl.getName();
<add> }
<add> if (plainName != null) {
<add> if ((lhs != null) && (lhs.length() > 0)) {
<add> out(lhs, ".");
<add> }
<add> out(plainName, "=");
<ide> }
<ide> else {
<ide> boolean protoCall = prototypeStyle && (getSuperMemberScope(expr) != null);
<ide> if (accessDirectly(decl) && !(protoCall && defineAsProperty(decl))) {
<ide> // direct access, without setter
<del> out(memberAccessBase(expr, decl, true, qualifyBaseExpr), "=");
<add> out(memberAccessBase(expr, decl, true, lhs), "=");
<ide> }
<ide> else {
<ide> // access through setter
<del> out(memberAccessBase(expr, decl, true, qualifyBaseExpr),
<add> out(memberAccessBase(expr, decl, true, lhs),
<ide> protoCall ? ".call(this," : "(");
<ide> paren = true;
<ide> }
<ide> callback.generateValue();
<ide> if (paren) { out(")"); }
<ide> }
<del> private void generateMemberAccess(StaticMemberOrTypeExpression expr, final String strValue,
<del> boolean qualifyBaseExpr) {
<del> generateMemberAccess(expr, new MemberAccessCallback() {
<add> private void generateMemberAccess(final StaticMemberOrTypeExpression expr,
<add> final String strValue, final String lhs) {
<add> generateMemberAccess(expr, new GenerateCallback() {
<ide> @Override public void generateValue() { out(strValue); }
<del> }, qualifyBaseExpr);
<add> }, lhs);
<ide> }
<ide>
<ide> @Override
<ide> final MethodOrValue moval = (MethodOrValue)bmeDecl;
<ide> if (moval.isVariable()) {
<ide> // simple assignment to a variable attribute
<del> generateMemberAccess(bme, new MemberAccessCallback() {
<add> generateMemberAccess(bme, new GenerateCallback() {
<ide> @Override public void generateValue() {
<ide> int boxType = boxUnboxStart(specStmt.getSpecifierExpression().getExpression().getTerm(),
<ide> moval);
<ide> }
<ide> boxUnboxEnd(boxType);
<ide> }
<del> }, true);
<add> }, null);
<ide> out(";");
<ide> } else if (moval.isMember()) {
<ide> // Specifier for a member attribute. This actually defines the
<ide> Declaration bmeDecl = bme.getDeclaration();
<ide> boolean simpleSetter = hasSimpleGetterSetter(bmeDecl);
<ide> if (!simpleSetter) {
<del> returnValue = memberAccess(bme);
<add> returnValue = memberAccess(bme, null);
<ide> }
<ide>
<ide> } else if (that.getLeftTerm() instanceof QualifiedMemberExpression) {
<ide> out(lhsVar, "=");
<ide> super.visit(qme);
<ide> out(",", lhsVar, ".");
<del> returnValue = lhsVar + "." + memberAccess(qme);
<add> returnValue = memberAccess(qme, lhsVar);
<ide> } else {
<ide> super.visit(qme);
<ide> out(".");
<ide> }
<ide> }
<ide>
<del> generateMemberAccess(lhsExpr, new MemberAccessCallback() {
<add> generateMemberAccess(lhsExpr, new GenerateCallback() {
<ide> @Override public void generateValue() {
<ide> int boxType = boxUnboxStart(that.getRightTerm(), that.getLeftTerm());
<ide> that.getRightTerm().visit(GenerateJsVisitor.this);
<ide> boxUnboxEnd(boxType);
<ide> }
<del> }, true);
<add> }, null);
<ide>
<ide> if (returnValue != null) { out(",", returnValue); }
<ide> out(")");
<ide> BaseMemberExpression lhsBME = (BaseMemberExpression) lhs;
<ide> Declaration lhsDecl = lhsBME.getDeclaration();
<ide>
<del> final String getLHS = memberAccess(lhsBME);
<add> final String getLHS = memberAccess(lhsBME, null);
<ide> out("(");
<del> generateMemberAccess(lhsBME, new MemberAccessCallback() {
<add> generateMemberAccess(lhsBME, new GenerateCallback() {
<ide> @Override public void generateValue() {
<ide> out(getLHS, ".", functionName, "(");
<ide> that.getRightTerm().visit(GenerateJsVisitor.this);
<ide> out(")");
<ide> }
<del> }, true);
<add> }, null);
<ide> if (!hasSimpleGetterSetter(lhsDecl)) { out(",", getLHS); }
<ide> out(")");
<ide>
<ide>
<ide> } else {
<ide> final String lhsPrimaryVar = createRetainedTempVar();
<del> final String getLHS = lhsPrimaryVar + "." + memberAccess(lhsQME);
<add> final String getLHS = memberAccess(lhsQME, lhsPrimaryVar);
<ide> out("(", lhsPrimaryVar, "=");
<ide> lhsQME.getPrimary().visit(this);
<del> out(",", lhsPrimaryVar, ".");
<del> generateMemberAccess(lhsQME, new MemberAccessCallback() {
<add> out(",");
<add> generateMemberAccess(lhsQME, new GenerateCallback() {
<ide> @Override public void generateValue() {
<ide> out(getLHS, ".", functionName, "(");
<ide> that.getRightTerm().visit(GenerateJsVisitor.this);
<ide> out(")");
<ide> }
<del> }, false);
<add> }, lhsPrimaryVar);
<ide>
<ide> if (!hasSimpleGetterSetter(lhsQME.getDeclaration())) {
<ide> out(",", getLHS);
<ide> if (term instanceof BaseMemberExpression) {
<ide> BaseMemberExpression bme = (BaseMemberExpression) term;
<ide> boolean simpleSetter = hasSimpleGetterSetter(bme.getDeclaration());
<del> String getMember = memberAccess(bme);
<add> String getMember = memberAccess(bme, null);
<ide> String applyFunc = String.format("%s.%s", getMember, functionName);
<ide> out("(");
<del> generateMemberAccess(bme, applyFunc, true);
<add> generateMemberAccess(bme, applyFunc, null);
<ide> if (!simpleSetter) { out(",", getMember); }
<ide> out(")");
<ide>
<ide> } else if (term instanceof QualifiedMemberExpression) {
<ide> QualifiedMemberExpression qme = (QualifiedMemberExpression) term;
<ide> String primaryVar = createRetainedTempVar();
<del> String getMember = primaryVar + "." + memberAccess(qme);
<add> String getMember = memberAccess(qme, primaryVar);
<ide> String applyFunc = String.format("%s.%s", getMember, functionName);
<ide> out("(", primaryVar, "=");
<ide> qme.getPrimary().visit(this);
<del> out(",", primaryVar, ".");
<del> generateMemberAccess(qme, applyFunc, false);
<add> out(",");
<add> generateMemberAccess(qme, applyFunc, primaryVar);
<ide> if (!hasSimpleGetterSetter(qme.getDeclaration())) {
<ide> out(",", getMember);
<ide> }
<ide> }
<ide> String oldValueVar = createRetainedTempVar("old" + bme.getDeclaration().getName());
<ide> String applyFunc = String.format("%s.%s", oldValueVar, functionName);
<del> out("(", oldValueVar, "=", memberAccess(bme), ",");
<del> generateMemberAccess(bme, applyFunc, true);
<add> out("(", oldValueVar, "=", memberAccess(bme, null), ",");
<add> generateMemberAccess(bme, applyFunc, null);
<ide> out(",", oldValueVar, ")");
<ide>
<ide> } else if (term instanceof QualifiedMemberExpression) {
<ide> String applyFunc = String.format("%s.%s", oldValueVar, functionName);
<ide> out("(", primaryVar, "=");
<ide> qme.getPrimary().visit(this);
<del> out(",", oldValueVar, "=", primaryVar, ".", memberAccess(qme), ",",
<del> primaryVar, ".");
<del> generateMemberAccess(qme, applyFunc, false);
<add> out(",", oldValueVar, "=", memberAccess(qme, primaryVar), ",");
<add> generateMemberAccess(qme, applyFunc, primaryVar);
<ide> out(",", oldValueVar, ")");
<ide> }
<ide> } |
|
Java | apache-2.0 | 7d148f344719aa0a2c58fb7e483c42de25682a7b | 0 | Valkryst/VTerminal | package com.valkryst.AsciiPanel;
import com.valkryst.AsciiPanel.component.AsciiScreen;
import javafx.scene.canvas.Canvas;
import javafx.scene.canvas.GraphicsContext;
import lombok.Getter;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
public class AsciiPanel extends Canvas {
/** The width of the panel, in characters. */
@Getter private int widthInCharacters;
/** The height of the panel, in characters. */
@Getter private int heightInCharacters;
/** The font to draw with. */
@Getter private AsciiFont font;
/** The cursor. */
@Getter private final AsciiCursor asciiCursor = new AsciiCursor(this);
@Getter private AsciiScreen currentScreen;
/**
* Constructs a new AsciiPanel.
*
* @param widthInCharacters
* The width of the panel, in characters.
*
* @param heightInCharacters
* The height of the panel, in characters.
*
* @param font
* The font to use.
*/
public AsciiPanel(int widthInCharacters, int heightInCharacters, final AsciiFont font) throws NullPointerException {
if (font == null) {
throw new NullPointerException("You must specify a font to use.");
}
if (widthInCharacters < 1) {
widthInCharacters = 1;
}
if (heightInCharacters < 1) {
heightInCharacters = 1;
}
this.font = font;
this.widthInCharacters = widthInCharacters;
this.heightInCharacters = heightInCharacters;
this.setWidth(widthInCharacters * font.getWidth());
this.setHeight(heightInCharacters * font.getHeight());
currentScreen = new AsciiScreen(0, 0, widthInCharacters, heightInCharacters);
}
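    // Illustrative usage: `new AsciiPanel(80, 24, someFont)` creates an 80x24-character panel
    // whose pixel dimensions are derived from the font's glyph width and height.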
/** Draws every character of every row onto the canvas. */
public void draw() {
final GraphicsContext gc = this.getGraphicsContext2D();
gc.setFont(font.getFont());
currentScreen.draw(this, font);
}
/**
* Determines whether or not the specified position is within the bounds of the panel.
*
* @param columnIndex
* The x-axis (column) coordinate.
*
* @param rowIndex
* The y-axis (row) coordinate.
*
* @return
* Whether or not the specified position is within the bounds of the panel.
*/
private boolean isPositionValid(final int columnIndex, final int rowIndex) {
if (rowIndex < 0 || rowIndex >= heightInCharacters) {
final Logger logger = LogManager.getLogger();
logger.error("The specified column of " + columnIndex + " exceeds the maximum width of " + widthInCharacters + ".");
return false;
}
if (columnIndex < 0 || columnIndex >= widthInCharacters) {
final Logger logger = LogManager.getLogger();
logger.error("The specified row of " + rowIndex + " exceeds the maximum width of " + widthInCharacters + ".");
return false;
}
return true;
}
}
| src/com/valkryst/AsciiPanel/AsciiPanel.java | package com.valkryst.AsciiPanel;
import com.valkryst.AsciiPanel.component.AsciiComponent;
import com.valkryst.AsciiPanel.component.AsciiScreen;
import javafx.scene.canvas.Canvas;
import javafx.scene.canvas.GraphicsContext;
import lombok.Getter;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.ArrayList;
public class AsciiPanel extends Canvas {
/** The width of the panel, in characters. */
@Getter private int widthInCharacters;
/** The height of the panel, in characters. */
@Getter private int heightInCharacters;
/** The font to draw with. */
@Getter private AsciiFont font;
/** The cursor. */
@Getter private final AsciiCursor asciiCursor = new AsciiCursor(this);
@Getter private AsciiScreen currentScreen;
private ArrayList<AsciiComponent> components = new ArrayList<>();
/**
* Constructs a new AsciiPanel.
*
* @param widthInCharacters
* The width of the panel, in characters.
*
* @param heightInCharacters
* The height of the panel, in characters.
*
* @param font
* The font to use.
*/
public AsciiPanel(int widthInCharacters, int heightInCharacters, final AsciiFont font) throws NullPointerException {
if (font == null) {
throw new NullPointerException("You must specify a font to use.");
}
if (widthInCharacters < 1) {
widthInCharacters = 1;
}
if (heightInCharacters < 1) {
heightInCharacters = 1;
}
this.font = font;
this.widthInCharacters = widthInCharacters;
this.heightInCharacters = heightInCharacters;
this.setWidth(widthInCharacters * font.getWidth());
this.setHeight(heightInCharacters * font.getHeight());
currentScreen = new AsciiScreen(0, 0, widthInCharacters, heightInCharacters);
}
/** Draws every character of every row onto the canvas. */
public void draw() {
final GraphicsContext gc = this.getGraphicsContext2D();
gc.setFont(font.getFont());
// Draw all non-AsciiScreen components:
components.stream()
.filter(component -> component instanceof AsciiScreen == false)
.forEach(component -> component.draw(currentScreen));
// Draw current screen:
currentScreen.draw(this, font);
}
/**
* Determines whether or not the specified position is within the bounds of the panel.
*
* @param columnIndex
* The x-axis (column) coordinate.
*
* @param rowIndex
* The y-axis (row) coordinate.
*
* @return
* Whether or not the specified position is within the bounds of the panel.
*/
private boolean isPositionValid(final int columnIndex, final int rowIndex) {
if (rowIndex < 0 || rowIndex >= heightInCharacters) {
final Logger logger = LogManager.getLogger();
logger.error("The specified column of " + columnIndex + " exceeds the maximum width of " + widthInCharacters + ".");
return false;
}
if (columnIndex < 0 || columnIndex >= widthInCharacters) {
final Logger logger = LogManager.getLogger();
logger.error("The specified row of " + rowIndex + " exceeds the maximum width of " + widthInCharacters + ".");
return false;
}
return true;
}
}
| AsciiScreens can now have a unique set of components associated with themselves.
| src/com/valkryst/AsciiPanel/AsciiPanel.java | AsciiScreens can now have a unique set of components associated with themselves. | <ide><path>rc/com/valkryst/AsciiPanel/AsciiPanel.java
<ide> package com.valkryst.AsciiPanel;
<ide>
<del>import com.valkryst.AsciiPanel.component.AsciiComponent;
<ide> import com.valkryst.AsciiPanel.component.AsciiScreen;
<ide> import javafx.scene.canvas.Canvas;
<ide> import javafx.scene.canvas.GraphicsContext;
<ide> import lombok.Getter;
<ide> import org.apache.logging.log4j.LogManager;
<ide> import org.apache.logging.log4j.Logger;
<del>
<del>import java.util.ArrayList;
<ide>
<ide> public class AsciiPanel extends Canvas {
<ide> /** The width of the panel, in characters. */
<ide> @Getter private final AsciiCursor asciiCursor = new AsciiCursor(this);
<ide>
<ide> @Getter private AsciiScreen currentScreen;
<del>
<del> private ArrayList<AsciiComponent> components = new ArrayList<>();
<ide>
<ide> /**
<ide> * Constructs a new AsciiPanel.
<ide> final GraphicsContext gc = this.getGraphicsContext2D();
<ide> gc.setFont(font.getFont());
<ide>
<del>
<del> // Draw all non-AsciiScreen components:
<del> components.stream()
<del> .filter(component -> component instanceof AsciiScreen == false)
<del> .forEach(component -> component.draw(currentScreen));
<del>
<del> // Draw current screen:
<ide> currentScreen.draw(this, font);
<ide> }
<ide> |
|
Java | mit | f018bd0065ec6276c7d94a071305e4515918793e | 0 | NamelessMC/Nameless-Java-API,NamelessMC/Nameless-Plugin-API | package com.namelessmc.NamelessAPI;
import static com.namelessmc.NamelessAPI.Request.RequestMethod.GET;
import static com.namelessmc.NamelessAPI.Request.RequestMethod.POST;
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import javax.net.ssl.HttpsURLConnection;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
public class Request {
private URL url;
private RequestMethod method;
private String parameters;
private JsonObject response;
public Request(URL baseUrl, Action action, String... parameters) {
try {
url = new URL(appendCharacter(baseUrl.toString(), '/') + action.toString());
} catch (MalformedURLException e) {
throw new IllegalArgumentException("URL or action is malformed (" + e.getMessage() + ")");
}
this.method = action.method;
this.parameters = String.join("&", parameters);
}
public JsonObject getResponse() throws NamelessException {
if (response == null) {
connect();
}
return response;
}
public void connect() throws NamelessException {
if (url.toString().startsWith("https://")){
try {
HttpsURLConnection connection = (HttpsURLConnection) url.openConnection();
connection.setRequestMethod(method.toString());
connection.setRequestProperty("Content-Length", Integer.toString(parameters.length()));
connection.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
connection.setDoOutput(true);
connection.addRequestProperty("User-Agent", "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.0)");
// Initialize output stream
DataOutputStream outputStream = new DataOutputStream(connection.getOutputStream());
// Write request
outputStream.writeBytes(parameters);
// Initialize input stream
InputStream inputStream = connection.getInputStream();
// Handle response
BufferedReader streamReader = new BufferedReader(new InputStreamReader(inputStream, "UTF-8"));
StringBuilder responseBuilder = new StringBuilder();
String responseString;
while ((responseString = streamReader.readLine()) != null)
responseBuilder.append(responseString);
JsonParser parser = new JsonParser();
response = parser.parse(responseBuilder.toString()).getAsJsonObject();
if (response.has("error")) {
// Error with request
String errorMessage = response.get("message").getAsString();
throw new NamelessException(errorMessage);
}
// Close output/input stream
outputStream.flush();
outputStream.close();
inputStream.close();
// Disconnect
connection.disconnect();
} catch (Exception e) {
throw new NamelessException(e);
}
} else {
try {
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.setRequestMethod(method.toString());
connection.setRequestProperty("Content-Length", Integer.toString(parameters.length()));
connection.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
connection.setDoOutput(true);
connection.addRequestProperty("User-Agent", "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.0)");
// Initialize output stream
DataOutputStream outputStream = new DataOutputStream(connection.getOutputStream());
// Write request
outputStream.writeBytes(parameters);
// Initialize input stream
InputStream inputStream = connection.getInputStream();
// Handle response
BufferedReader streamReader = new BufferedReader(new InputStreamReader(inputStream, "UTF-8"));
StringBuilder responseBuilder = new StringBuilder();
String responseString;
while ((responseString = streamReader.readLine()) != null)
responseBuilder.append(responseString);
JsonParser parser = new JsonParser();
response = parser.parse(responseBuilder.toString()).getAsJsonObject();
if (response.has("error")) {
// Error with request
String errorMessage = response.get("message").getAsString();
throw new NamelessException(errorMessage);
}
// Close output/input stream
outputStream.flush();
outputStream.close();
inputStream.close();
// Disconnect
connection.disconnect();
} catch (Exception e) {
throw new NamelessException(e);
}
}
}
private static String appendCharacter(String string, char c) {
if (string.endsWith(c + "")) {
return string;
} else {
return string + c;
}
}
public static enum Action {
INFO("info", GET),
GET_ANNOUNCEMENTS("getAnnouncements", GET),
REGISTER("register", POST),
USER_INFO("userInfo", GET),
SET_GROUP("setGroup", POST),
CREATE_REPORT("createReport", POST),
GET_NOTIFICATIONS("getNotifications", GET),
SERVER_INFO("serverInfo", POST),
;
RequestMethod method;
String name;
Action(String name, RequestMethod method){
this.name = name;
this.method = method;
}
@Override
public String toString() {
return name;
}
/*@Override
public String toString() {
List<String> list = Arrays.asList(super.toString().split("_"));
StringBuilder builder = new StringBuilder();
builder.append(list.remove(0).toLowerCase(Locale.ENGLISH));
list.forEach((element) -> builder.append(element.substring(0, 1) + element.substring(1).toLowerCase()));
return builder.toString();
}*/
}
public static enum RequestMethod {
GET, POST
}
}
| src/com/namelessmc/NamelessAPI/Request.java | package com.namelessmc.NamelessAPI;
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import javax.net.ssl.HttpsURLConnection;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import static com.namelessmc.NamelessAPI.Request.RequestMethod.*;
public class Request {
private URL url;
private RequestMethod method;
private String parameters;
private JsonObject response;
public Request(URL baseUrl, Action action, String... parameters) {
try {
url = new URL(appendCharacter(baseUrl.toString(), '/') + action.toString());
} catch (MalformedURLException e) {
throw new IllegalArgumentException("URL or action is malformed (" + e.getMessage() + ")");
}
this.method = action.method;
this.parameters = String.join("&", parameters);
}
public JsonObject getResponse() throws NamelessException {
if (response == null) {
connect();
}
return response;
}
public void connect() throws NamelessException {
if (url.toString().startsWith("https://")){
try {
HttpsURLConnection connection = (HttpsURLConnection) url.openConnection();
connection.setRequestMethod(method.toString());
connection.setRequestProperty("Content-Length", Integer.toString(parameters.length()));
connection.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
connection.setDoOutput(true);
connection.addRequestProperty("User-Agent", "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.0)");
// Initialize output stream
DataOutputStream outputStream = new DataOutputStream(connection.getOutputStream());
// Write request
outputStream.writeBytes(parameters);
// Initialize input stream
InputStream inputStream = connection.getInputStream();
// Handle response
BufferedReader streamReader = new BufferedReader(new InputStreamReader(inputStream, "UTF-8"));
StringBuilder responseBuilder = new StringBuilder();
String responseString;
while ((responseString = streamReader.readLine()) != null)
responseBuilder.append(responseString);
JsonParser parser = new JsonParser();
response = parser.parse(responseBuilder.toString()).getAsJsonObject();
if (response.has("error")) {
// Error with request
String errorMessage = response.get("message").getAsString();
throw new NamelessException(errorMessage);
}
// Close output/input stream
outputStream.flush();
outputStream.close();
inputStream.close();
// Disconnect
connection.disconnect();
} catch (Exception e) {
throw new NamelessException(e);
}
} else {
try {
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.setRequestMethod(method.toString());
connection.setRequestProperty("Content-Length", Integer.toString(parameters.length()));
connection.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
connection.setDoOutput(true);
connection.addRequestProperty("User-Agent", "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.0)");
// Initialize output stream
DataOutputStream outputStream = new DataOutputStream(connection.getOutputStream());
// Write request
outputStream.writeBytes(parameters);
// Initialize input stream
InputStream inputStream = connection.getInputStream();
// Handle response
BufferedReader streamReader = new BufferedReader(new InputStreamReader(inputStream, "UTF-8"));
StringBuilder responseBuilder = new StringBuilder();
String responseString;
while ((responseString = streamReader.readLine()) != null)
responseBuilder.append(responseString);
JsonParser parser = new JsonParser();
response = parser.parse(responseBuilder.toString()).getAsJsonObject();
if (response.has("error")) {
// Error with request
String errorMessage = response.get("message").getAsString();
throw new NamelessException(errorMessage);
}
// Close output/input stream
outputStream.flush();
outputStream.close();
inputStream.close();
// Disconnect
connection.disconnect();
} catch (Exception e) {
throw new NamelessException(e);
}
}
}
private static String appendCharacter(String string, char c) {
if (string.endsWith(c + "")) {
return string;
} else {
return string + c;
}
}
public static enum Action {
INFO(GET),
GET_ANNOUNCEMENTS(GET),
REGISTER(POST),
USER_INFO(GET),
SET_GROUP(POST),
CREATE_REPORT(POST),
GET_NOTIFICATIONS(GET),
SERVER_INFO(POST),
;
RequestMethod method;
Action(RequestMethod method){
this.method = method;
}
@Override
public String toString() {
List<String> list = Arrays.asList(super.toString().split("_"));
StringBuilder builder = new StringBuilder();
builder.append(list.remove(0).toLowerCase(Locale.ENGLISH));
list.forEach((element) -> builder.append(element.substring(0, 1) + element.substring(1).toLowerCase()));
return builder.toString();
}
}
public static enum RequestMethod {
GET, POST
}
}
| Just enter action names manually
| src/com/namelessmc/NamelessAPI/Request.java | Just enter action names manually | <ide><path>rc/com/namelessmc/NamelessAPI/Request.java
<ide> package com.namelessmc.NamelessAPI;
<add>
<add>import static com.namelessmc.NamelessAPI.Request.RequestMethod.GET;
<add>import static com.namelessmc.NamelessAPI.Request.RequestMethod.POST;
<ide>
<ide> import java.io.BufferedReader;
<ide> import java.io.DataOutputStream;
<ide> import java.net.HttpURLConnection;
<ide> import java.net.MalformedURLException;
<ide> import java.net.URL;
<del>import java.util.Arrays;
<del>import java.util.List;
<del>import java.util.Locale;
<ide>
<ide> import javax.net.ssl.HttpsURLConnection;
<ide>
<ide> import com.google.gson.JsonObject;
<ide> import com.google.gson.JsonParser;
<del>
<del>import static com.namelessmc.NamelessAPI.Request.RequestMethod.*;
<ide>
<ide> public class Request {
<ide>
<ide>
<ide> public static enum Action {
<ide>
<del> INFO(GET),
<del> GET_ANNOUNCEMENTS(GET),
<del> REGISTER(POST),
<del> USER_INFO(GET),
<del> SET_GROUP(POST),
<del> CREATE_REPORT(POST),
<del> GET_NOTIFICATIONS(GET),
<del> SERVER_INFO(POST),
<add> INFO("info", GET),
<add> GET_ANNOUNCEMENTS("getAnnouncements", GET),
<add> REGISTER("register", POST),
<add> USER_INFO("userInfo", GET),
<add> SET_GROUP("setGroup", POST),
<add> CREATE_REPORT("createReport", POST),
<add> GET_NOTIFICATIONS("getNotifications", GET),
<add> SERVER_INFO("serverInfo", POST),
<ide>
<ide> ;
<ide>
<ide> RequestMethod method;
<add> String name;
<ide>
<del> Action(RequestMethod method){
<add> Action(String name, RequestMethod method){
<add> this.name = name;
<ide> this.method = method;
<ide> }
<ide>
<ide> @Override
<add> public String toString() {
<add> return name;
<add> }
<add>
<add> /*@Override
<ide> public String toString() {
<ide> List<String> list = Arrays.asList(super.toString().split("_"));
<ide> StringBuilder builder = new StringBuilder();
<ide> builder.append(list.remove(0).toLowerCase(Locale.ENGLISH));
<ide> list.forEach((element) -> builder.append(element.substring(0, 1) + element.substring(1).toLowerCase()));
<ide> return builder.toString();
<del> }
<add> }*/
<ide>
<ide> }
<ide> |
|
Java | mit | 17d31de00d6fcc5250a2c4f8c643c83f5ac724c5 | 0 | Fundynamic/dune2themaker4j,Fundynamic/dune2themaker4j | package com.fundynamic.d2tm.game.math;
public class Vector2D<T extends Number> {
private final T x, y;
public static Vector2D zero() {
return new Vector2D(0, 0);
}
public Vector2D(T x, T y) {
this.x = x;
this.y = y;
}
public T getX() {
return x;
}
public T getY() {
return y;
}
public Vector2D<Float> move(float xVelocity, float yVelocity, float speed) {
float newX = x.floatValue() + (speed * xVelocity);
float newY = y.floatValue() + (speed * yVelocity);
return new Vector2D<>(newX, newY);
}
public Vector2D<Integer> toInt() {
int newX = x.intValue();
int newY = y.intValue();
return new Vector2D<>(newX, newY);
}
@Override
public String toString() {
return "Vector2D{" +
"x=" + x +
", y=" + y +
'}';
}
}
| src/main/java/com/fundynamic/d2tm/game/math/Vector2D.java | package com.fundynamic.d2tm.game.math;
public class Vector2D<T extends Number> {
private final T x, y;
public static Vector2D zero() {
return new Vector2D(0, 0);
}
public Vector2D(T x, T y) {
this.x = x;
this.y = y;
}
public T getX() {
return x;
}
public T getY() {
return y;
}
public Vector2D<Float> move(float xVelocity, float yVelocity, float speed) {
float newX = x.floatValue() + (speed * xVelocity);
float newY = y.floatValue() + (speed * yVelocity);
return new Vector2D<>(newX, newY);
}
public Vector2D<Integer> toInt() {
int newX = x.intValue();
int newY = y.intValue();
return new Vector2D<>(newX, newY);
}
}
| Add toString for better logging/output | src/main/java/com/fundynamic/d2tm/game/math/Vector2D.java | Add toString for better logging/output | <ide><path>rc/main/java/com/fundynamic/d2tm/game/math/Vector2D.java
<ide> int newY = y.intValue();
<ide> return new Vector2D<>(newX, newY);
<ide> }
<add>
<add> @Override
<add> public String toString() {
<add> return "Vector2D{" +
<add> "x=" + x +
<add> ", y=" + y +
<add> '}';
<add> }
<ide> } |
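A short usage sketch of the toString() added above, showing what the logged output looks like; the values are arbitrary and the demo class name is made up:

import com.fundynamic.d2tm.game.math.Vector2D;

public final class Vector2DToStringDemo {
    public static void main(final String[] args) {
        // move() applies speed * velocity per axis: 1.0 + 10 * 0.5 = 6.0, 2.0 + 10 * 0.5 = 7.0
        final Vector2D<Float> moved = new Vector2D<>(1.0f, 2.0f).move(0.5f, 0.5f, 10.0f);
        System.out.println(moved);         // Vector2D{x=6.0, y=7.0}
        System.out.println(moved.toInt()); // Vector2D{x=6, y=7}
    }
}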
|
Java | agpl-3.0 | c3c65117b2c3b71b152f6552f8a97fa5c28f4277 | 0 | wbaumann/SmartReceiptsLibrary,JuliaSoboleva/SmartReceiptsLibrary,JuliaSoboleva/SmartReceiptsLibrary,wbaumann/SmartReceiptsLibrary,wbaumann/SmartReceiptsLibrary,JuliaSoboleva/SmartReceiptsLibrary | package co.smartreceipts.android.fragments;
import android.app.Activity;
import android.content.Context;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v7.app.ActionBar;
import android.support.v7.widget.Toolbar;
import android.text.TextUtils;
import android.text.format.DateFormat;
import android.text.format.Time;
import android.text.method.TextKeyListener;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.view.inputmethod.InputMethodManager;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.AutoCompleteTextView;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.LinearLayout;
import android.widget.Spinner;
import android.widget.Toast;
import java.io.File;
import java.sql.Date;
import java.util.Collections;
import java.util.List;
import co.smartreceipts.android.R;
import co.smartreceipts.android.activities.DefaultFragmentProvider;
import co.smartreceipts.android.activities.NavigationHandler;
import co.smartreceipts.android.activities.SmartReceiptsActivity;
import co.smartreceipts.android.adapters.TaxAutoCompleteAdapter;
import co.smartreceipts.android.analytics.events.Events;
import co.smartreceipts.android.apis.ExchangeRateServiceManager;
import co.smartreceipts.android.apis.MemoryLeakSafeCallback;
import co.smartreceipts.android.date.DateEditText;
import co.smartreceipts.android.model.Category;
import co.smartreceipts.android.model.PaymentMethod;
import co.smartreceipts.android.model.Receipt;
import co.smartreceipts.android.model.Trip;
import co.smartreceipts.android.model.factory.ExchangeRateBuilderFactory;
import co.smartreceipts.android.model.factory.ReceiptBuilderFactory;
import co.smartreceipts.android.model.gson.ExchangeRate;
import co.smartreceipts.android.persistence.DatabaseHelper;
import co.smartreceipts.android.persistence.Preferences;
import co.smartreceipts.android.persistence.database.controllers.TableEventsListener;
import co.smartreceipts.android.persistence.database.controllers.impl.StubTableEventsListener;
import co.smartreceipts.android.persistence.database.operations.DatabaseOperationMetadata;
import co.smartreceipts.android.purchases.PurchaseSource;
import co.smartreceipts.android.purchases.Subscription;
import co.smartreceipts.android.purchases.SubscriptionManager;
import co.smartreceipts.android.widget.HideSoftKeyboardOnTouchListener;
import co.smartreceipts.android.widget.NetworkRequestAwareEditText;
import co.smartreceipts.android.widget.ShowSoftKeyboardOnFocusChangeListener;
import co.smartreceipts.android.widget.UserSelectionTrackingOnItemSelectedListener;
import retrofit.RetrofitError;
import retrofit.client.Response;
import rx.android.schedulers.AndroidSchedulers;
import rx.functions.Action1;
import rx.schedulers.Schedulers;
import wb.android.autocomplete.AutoCompleteAdapter;
public class ReceiptCreateEditFragment extends WBFragment implements View.OnFocusChangeListener, NetworkRequestAwareEditText.RetryListener, DatabaseHelper.ReceiptAutoCompleteListener {
private static final String TAG = ReceiptCreateEditFragment.class.getSimpleName();
private static final String ARG_FILE = "arg_file";
private static final String KEY_OUT_STATE_IS_EXCHANGE_RATE_VISIBLE = "key_is_exchange_rate_visible";
// Metadata
private Trip mTrip;
private Receipt mReceipt;
private File mFile;
// Views
private AutoCompleteTextView nameBox;
private EditText priceBox;
private AutoCompleteTextView taxBox;
private Spinner currencySpinner;
private NetworkRequestAwareEditText exchangeRateBox;
private DateEditText dateBox;
private AutoCompleteTextView commentBox;
private Spinner categoriesSpinner;
private CheckBox reimbursable;
private CheckBox fullpage;
private Spinner paymentMethodsSpinner;
private EditText extra_edittext_box_1;
private EditText extra_edittext_box_2;
private EditText extra_edittext_box_3;
private ViewGroup mPaymentMethodsContainer;
private ViewGroup mExchangeRateContainer;
private Toolbar mToolbar;
private View mFocusedView;
// Rx
private rx.Subscription mIdSubscription;
private TableEventsListener<Category> mCategoryTableEventsListener;
private TableEventsListener<PaymentMethod> mPaymentMethodTableEventsListener;
// Misc
private MemoryLeakSafeCallback<ExchangeRate, EditText> mLastExchangeRateFetchCallback;
private NavigationHandler mNavigationHandler;
private ExchangeRateServiceManager mExchangeRateServiceManager;
private ReceiptInputCache mReceiptInputCache;
private AutoCompleteAdapter mReceiptsNameAutoCompleteAdapter, mReceiptsCommentAutoCompleteAdapter;
private ArrayAdapter<CharSequence> mCurrenciesAdapter;
private List<Category> mCategoriesList;
private ArrayAdapter<Category> mCategoriesAdpater;
private ArrayAdapter<PaymentMethod> mPaymentMethodsAdapter;
/**
* Creates a new instance of this fragment for a new receipt
*
* @param trip - the parent trip of this receipt
* @param file - the file associated with this receipt or null if we do not have one
* @return the new instance of this fragment
*/
public static ReceiptCreateEditFragment newInstance(@NonNull Trip trip, @Nullable File file) {
return newInstance(trip, null, file);
}
/**
* Creates a new instance of this fragment to edit an existing receipt
*
* @param trip - the parent trip of this receipt
* @param receiptToEdit - the receipt to edit
* @return the new instance of this fragment
*/
public static ReceiptCreateEditFragment newInstance(@NonNull Trip trip, @NonNull Receipt receiptToEdit) {
return newInstance(trip, receiptToEdit, null);
}
private static ReceiptCreateEditFragment newInstance(@NonNull Trip trip, @Nullable Receipt receiptToEdit, @Nullable File file) {
final ReceiptCreateEditFragment fragment = new ReceiptCreateEditFragment();
final Bundle args = new Bundle();
args.putParcelable(Trip.PARCEL_KEY, trip);
args.putParcelable(Receipt.PARCEL_KEY, receiptToEdit);
args.putSerializable(ARG_FILE, file);
fragment.setArguments(args);
return fragment;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mTrip = getArguments().getParcelable(Trip.PARCEL_KEY);
mReceipt = getArguments().getParcelable(Receipt.PARCEL_KEY);
mFile = (File) getArguments().getSerializable(ARG_FILE);
mReceiptInputCache = new ReceiptInputCache(getFragmentManager());
mNavigationHandler = new NavigationHandler(getActivity(), new DefaultFragmentProvider());
mExchangeRateServiceManager = new ExchangeRateServiceManager(getFragmentManager());
mCurrenciesAdapter = new ArrayAdapter<>(getActivity(), android.R.layout.simple_spinner_item, getPersistenceManager().getDatabase().getCurrenciesList());
mCategoriesList = Collections.emptyList();
mCategoriesAdpater = new ArrayAdapter<>(getActivity(), android.R.layout.simple_spinner_item, Collections.<Category>emptyList());
mPaymentMethodsAdapter = new ArrayAdapter<>(getActivity(), android.R.layout.simple_spinner_item, Collections.<PaymentMethod>emptyList());
setHasOptionsMenu(true);
}
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
return inflater.inflate(R.layout.update_receipt, container, false);
}
@Override
public void onViewCreated(View rootView, @Nullable Bundle savedInstanceState) {
super.onViewCreated(rootView, savedInstanceState);
this.nameBox = (AutoCompleteTextView) getFlex().getSubView(getActivity(), rootView, R.id.DIALOG_RECEIPTMENU_NAME);
this.priceBox = (EditText) getFlex().getSubView(getActivity(), rootView, R.id.DIALOG_RECEIPTMENU_PRICE);
this.taxBox = (AutoCompleteTextView) getFlex().getSubView(getActivity(), rootView, R.id.DIALOG_RECEIPTMENU_TAX);
this.currencySpinner = (Spinner) getFlex().getSubView(getActivity(), rootView, R.id.DIALOG_RECEIPTMENU_CURRENCY);
this.exchangeRateBox = (NetworkRequestAwareEditText) getFlex().getSubView(getActivity(), rootView, R.id.DIALOG_RECEIPTMENU_EXCHANGE_RATE);
mExchangeRateContainer = (ViewGroup) getFlex().getSubView(getActivity(), rootView, R.id.exchange_rate_container);
this.dateBox = (DateEditText) getFlex().getSubView(getActivity(), rootView, R.id.DIALOG_RECEIPTMENU_DATE);
this.commentBox = (AutoCompleteTextView) getFlex().getSubView(getActivity(), rootView, R.id.DIALOG_RECEIPTMENU_COMMENT);
this.categoriesSpinner = (Spinner) getFlex().getSubView(getActivity(), rootView, R.id.DIALOG_RECEIPTMENU_CATEGORY);
this.reimbursable = (CheckBox) getFlex().getSubView(getActivity(), rootView, R.id.DIALOG_RECEIPTMENU_EXPENSABLE);
this.fullpage = (CheckBox) getFlex().getSubView(getActivity(), rootView, R.id.DIALOG_RECEIPTMENU_FULLPAGE);
this.paymentMethodsSpinner = (Spinner) getFlex().getSubView(getActivity(), rootView, R.id.dialog_receiptmenu_payment_methods_spinner);
mPaymentMethodsContainer = (ViewGroup) getFlex().getSubView(getActivity(), rootView, R.id.payment_methods_container);
// Extras
final LinearLayout extras = (LinearLayout) getFlex().getSubView(getActivity(), rootView, R.id.DIALOG_RECEIPTMENU_EXTRAS);
this.extra_edittext_box_1 = (EditText) extras.findViewWithTag(getFlexString(R.string.RECEIPTMENU_TAG_EXTRA_EDITTEXT_1));
this.extra_edittext_box_2 = (EditText) extras.findViewWithTag(getFlexString(R.string.RECEIPTMENU_TAG_EXTRA_EDITTEXT_2));
this.extra_edittext_box_3 = (EditText) extras.findViewWithTag(getFlexString(R.string.RECEIPTMENU_TAG_EXTRA_EDITTEXT_3));
// Toolbar stuff
mToolbar = (Toolbar) rootView.findViewById(R.id.toolbar);
if (mNavigationHandler.isDualPane()) {
mToolbar.setVisibility(View.GONE);
} else {
setSupportActionBar(mToolbar);
}
// Set each focus listener, so we can track the focus view across resume -> pauses
this.nameBox.setOnFocusChangeListener(this);
this.priceBox.setOnFocusChangeListener(this);
this.taxBox.setOnFocusChangeListener(this);
this.currencySpinner.setOnFocusChangeListener(this);
this.dateBox.setOnFocusChangeListener(this);
this.commentBox.setOnFocusChangeListener(this);
// Custom view properties
exchangeRateBox.setFailedHint(R.string.DIALOG_RECEIPTMENU_HINT_EXCHANGE_RATE_FAILED);
// Set click listeners
dateBox.setOnTouchListener(new HideSoftKeyboardOnTouchListener());
categoriesSpinner.setOnTouchListener(new HideSoftKeyboardOnTouchListener());
currencySpinner.setOnTouchListener(new HideSoftKeyboardOnTouchListener());
// Show default dictionary with auto-complete
nameBox.setKeyListener(TextKeyListener.getInstance(true, TextKeyListener.Capitalize.SENTENCES));
// Set-up tax layers
if (getPersistenceManager().getPreferences().includeTaxField()) {
priceBox.setHint(getFlexString(R.string.DIALOG_RECEIPTMENU_HINT_PRICE_SHORT));
taxBox.setVisibility(View.VISIBLE);
}
// Configure dropdown defaults for currencies
mCurrenciesAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
currencySpinner.setAdapter(mCurrenciesAdapter);
// And the exchange rate processing for our currencies
final boolean exchangeRateIsVisible = savedInstanceState != null && savedInstanceState.getBoolean(KEY_OUT_STATE_IS_EXCHANGE_RATE_VISIBLE);
if (exchangeRateIsVisible) {
// Note: the restoration of selected spinner items (in the currency spinner) is delayed so we use this state tracker to restore immediately
mExchangeRateContainer.setVisibility(View.VISIBLE);
}
currencySpinner.setOnItemSelectedListener(new UserSelectionTrackingOnItemSelectedListener() {
@Override
public void onUserSelectedNewItem(AdapterView<?> parent, View view, int position, long id, int previousPosition) {
// Then determine if we should show/hide the box
final String baseCurrencyCode = mCurrenciesAdapter.getItem(position).toString();
configureExchangeRateField(baseCurrencyCode);
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
// Intentional no-op
}
});
// Outline date defaults
dateBox.setFocusableInTouchMode(false);
dateBox.setOnClickListener(getDateManager().getDateEditTextListener());
// Lastly, preset adapters for "new" receipts
final boolean isNewReceipt = mReceipt == null;
if (isNewReceipt) {
if (getPersistenceManager().getPreferences().includeTaxField()) {
taxBox.setAdapter(new TaxAutoCompleteAdapter(getActivity(), priceBox, taxBox, getPersistenceManager().getPreferences(), getPersistenceManager().getPreferences().getDefaultTaxPercentage()));
}
}
}
@Override
public void onActivityCreated(@Nullable Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
// Configure things if it's not a restored fragment
if (savedInstanceState == null) {
final boolean isNewReceipt = mReceipt == null;
if (isNewReceipt) {
final Time now = new Time();
now.setToNow();
if (mReceiptInputCache.getCachedDate() == null) {
if (getPersistenceManager().getPreferences().defaultToFirstReportDate()) {
dateBox.date = mTrip.getStartDate();
} else {
dateBox.date = new Date(now.toMillis(false));
}
} else {
dateBox.date = mReceiptInputCache.getCachedDate();
}
dateBox.setText(DateFormat.getDateFormat(getActivity()).format(dateBox.date));
final Preferences preferences = getPersistenceManager().getPreferences();
reimbursable.setChecked(preferences.doReceiptsDefaultAsReimbursable());
if (preferences.matchCommentToCategory() && preferences.matchNameToCategory()) {
if (mFocusedView == null) {
mFocusedView = priceBox;
}
} else if (preferences.matchNameToCategory()) {
if (mFocusedView == null) {
mFocusedView = priceBox;
}
}
int idx = mCurrenciesAdapter.getPosition((mTrip != null) ? mTrip.getDefaultCurrencyCode() : preferences.getDefaultCurreny());
int cachedIdx = (mReceiptInputCache.getCachedCurrency() != null) ? mCurrenciesAdapter.getPosition(mReceiptInputCache.getCachedCurrency()) : -1;
idx = (cachedIdx >= 0) ? cachedIdx : idx;
if (idx >= 0) {
currencySpinner.setSelection(idx);
}
if (!mTrip.getDefaultCurrencyCode().equals(mReceiptInputCache.getCachedCurrency())) {
configureExchangeRateField(mReceiptInputCache.getCachedCurrency());
}
fullpage.setChecked(preferences.shouldDefaultToFullPage());
} else {
nameBox.setText(mReceipt.getName());
priceBox.setText(mReceipt.getPrice().getDecimalFormattedPrice());
dateBox.setText(mReceipt.getFormattedDate(getActivity(), getPersistenceManager().getPreferences().getDateSeparator()));
dateBox.date = mReceipt.getDate();
commentBox.setText(mReceipt.getComment());
taxBox.setText(mReceipt.getTax().getDecimalFormattedPrice());
final ExchangeRate exchangeRate = mReceipt.getPrice().getExchangeRate();
if (exchangeRate.supportsExchangeRateFor(mTrip.getDefaultCurrencyCode())) {
exchangeRateBox.setText(exchangeRate.getDecimalFormattedExchangeRate(mTrip.getDefaultCurrencyCode()));
}
int idx = mCurrenciesAdapter.getPosition(mReceipt.getPrice().getCurrencyCode());
if (idx > 0) {
currencySpinner.setSelection(idx);
}
if (mReceipt.getPrice().getCurrency().equals(mTrip.getPrice().getCurrency())) {
mExchangeRateContainer.setVisibility(View.GONE);
} else {
mExchangeRateContainer.setVisibility(View.VISIBLE);
}
reimbursable.setChecked(mReceipt.isReimbursable());
fullpage.setChecked(mReceipt.isFullPage());
if (extra_edittext_box_1 != null && mReceipt.hasExtraEditText1()) {
extra_edittext_box_1.setText(mReceipt.getExtraEditText1());
}
if (extra_edittext_box_2 != null && mReceipt.hasExtraEditText2()) {
extra_edittext_box_2.setText(mReceipt.getExtraEditText2());
}
if (extra_edittext_box_3 != null && mReceipt.hasExtraEditText3()) {
extra_edittext_box_3.setText(mReceipt.getExtraEditText3());
}
}
// Focused View
if (mFocusedView == null) {
mFocusedView = nameBox;
}
}
// Configure items that require callbacks
mCategoryTableEventsListener = new StubTableEventsListener<Category>() {
@Override
public void onGetSuccess(@NonNull List<Category> list) {
if (isAdded()) {
mCategoriesList = list;
mCategoriesAdpater = new ArrayAdapter<>(getActivity(), android.R.layout.simple_spinner_item, list);
mCategoriesAdpater.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
categoriesSpinner.setAdapter(mCategoriesAdpater);
if (mReceipt == null) {
final Preferences preferences = getPersistenceManager().getPreferences();
if (preferences.matchCommentToCategory() || preferences.matchNameToCategory()) {
categoriesSpinner.setOnItemSelectedListener(new SpinnerSelectionListener());
}
if (preferences.predictCategories()) { // Predict Breakfast, Lunch, Dinner by the hour
if (mReceiptInputCache.getCachedCategory() == null) {
final Time now = new Time();
now.setToNow();
String nameToIndex = null;
if (now.hour >= 4 && now.hour < 11) { // Breakfast hours
nameToIndex = getString(R.string.category_breakfast);
} else if (now.hour >= 11 && now.hour < 16) { // Lunch hours
nameToIndex = getString(R.string.category_lunch);
} else if (now.hour >= 16 && now.hour < 23) { // Dinner hours
nameToIndex = getString(R.string.category_dinner);
}
if (nameToIndex != null) {
for (int i = 0; i < mCategoriesAdpater.getCount(); i++) {
if (nameToIndex.equals(mCategoriesAdpater.getItem(i).getName())) {
categoriesSpinner.setSelection(i);
break; // Exit loop now
}
}
}
} else {
int idx = mCategoriesAdpater.getPosition(mReceiptInputCache.getCachedCategory());
if (idx > 0) {
categoriesSpinner.setSelection(idx);
}
}
}
} else {
categoriesSpinner.setSelection(mCategoriesAdpater.getPosition(mReceipt.getCategory()));
}
}
}
};
mPaymentMethodTableEventsListener = new StubTableEventsListener<PaymentMethod>() {
@Override
public void onGetSuccess(@NonNull List<PaymentMethod> list) {
if (isAdded()) {
mPaymentMethodsAdapter = new ArrayAdapter<>(getActivity(), android.R.layout.simple_spinner_item, list);
mPaymentMethodsAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
paymentMethodsSpinner.setAdapter(mPaymentMethodsAdapter);
if (getPersistenceManager().getPreferences().getUsesPaymentMethods()) {
mPaymentMethodsContainer.setVisibility(View.VISIBLE);
if (mReceipt != null) {
final PaymentMethod oldPaymentMethod = mReceipt.getPaymentMethod();
if (oldPaymentMethod != null) {
final int paymentIdx = mPaymentMethodsAdapter.getPosition(oldPaymentMethod);
if (paymentIdx > 0) {
paymentMethodsSpinner.setSelection(paymentIdx);
}
}
}
}
}
}
};
getSmartReceiptsApplication().getTableControllerManager().getCategoriesTableController().subscribe(mCategoryTableEventsListener);
getSmartReceiptsApplication().getTableControllerManager().getPaymentMethodsTableController().subscribe(mPaymentMethodTableEventsListener);
getSmartReceiptsApplication().getTableControllerManager().getCategoriesTableController().get();
getSmartReceiptsApplication().getTableControllerManager().getPaymentMethodsTableController().get();
}
@Override
public void onResume() {
super.onResume();
final boolean isNewReceipt = mReceipt == null;
final String title;
if (isNewReceipt) {
title = getFlexString(R.string.DIALOG_RECEIPTMENU_TITLE_NEW);
} else {
if (getPersistenceManager().getPreferences().isShowReceiptID()) {
title = String.format(getFlexString(R.string.DIALOG_RECEIPTMENU_TITLE_EDIT_ID), mReceipt.getId());
} else {
title = getFlexString(R.string.DIALOG_RECEIPTMENU_TITLE_EDIT);
}
}
final ActionBar actionBar = getSupportActionBar();
if (actionBar != null) {
actionBar.setHomeButtonEnabled(true);
actionBar.setDisplayHomeAsUpEnabled(true);
actionBar.setHomeAsUpIndicator(R.drawable.ic_action_cancel);
actionBar.setTitle(title);
actionBar.setSubtitle("");
}
if (isNewReceipt && getPersistenceManager().getPreferences().isShowReceiptID()) {
mIdSubscription = getPersistenceManager().getDatabase().getNextReceiptAutoIncremenetIdHelper()
.subscribeOn(Schedulers.io())
.observeOn(AndroidSchedulers.mainThread())
.subscribe(new Action1<Integer>() {
@Override
public void call(Integer integer) {
if (isResumed()) {
final ActionBar actionBar = getSupportActionBar();
if (actionBar != null) {
final String titleWithId = String.format(getFlexString(R.string.DIALOG_RECEIPTMENU_TITLE_NEW_ID), integer);
actionBar.setTitle(titleWithId);
}
}
}
});
}
if (isNewReceipt) {
if (getPersistenceManager().getPreferences().enableAutoCompleteSuggestions()) {
final DatabaseHelper db = getPersistenceManager().getDatabase();
if (mReceiptsNameAutoCompleteAdapter == null) {
mReceiptsNameAutoCompleteAdapter = AutoCompleteAdapter.getInstance(getActivity(), DatabaseHelper.TAG_RECEIPTS_NAME, db, db);
} else {
mReceiptsNameAutoCompleteAdapter.reset();
}
if (mReceiptsCommentAutoCompleteAdapter == null) {
mReceiptsCommentAutoCompleteAdapter = AutoCompleteAdapter.getInstance(getActivity(), DatabaseHelper.TAG_RECEIPTS_COMMENT, db);
} else {
mReceiptsCommentAutoCompleteAdapter.reset();
}
nameBox.setAdapter(mReceiptsNameAutoCompleteAdapter);
commentBox.setAdapter(mReceiptsCommentAutoCompleteAdapter);
}
}
if (mFocusedView != null) {
mFocusedView.requestFocus(); // Make sure we're focused on the right view
}
exchangeRateBox.setRetryListener(this);
getPersistenceManager().getDatabase().registerReceiptAutoCompleteListener(this);
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
inflater.inflate(R.menu.menu_save, menu);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
if (item.getItemId() == android.R.id.home) {
mNavigationHandler.navigateToReportInfoFragment(mTrip);
return true;
}
if (item.getItemId() == R.id.action_save) {
saveReceipt();
return true;
}
return super.onOptionsItemSelected(item);
}
@Override
public void onFocusChange(View v, boolean hasFocus) {
mFocusedView = hasFocus ? v : null;
if (mReceipt == null) {
// Only launch if we have focus and it's a new receipt
new ShowSoftKeyboardOnFocusChangeListener().onFocusChange(v, hasFocus);
}
}
@Override
public void onUserRetry() {
if (getPersistenceManager().getSubscriptionCache().getSubscriptionWallet().hasSubscription(Subscription.SmartReceiptsPlus)) {
Log.i(TAG, "Attempting to retry with valid subscription. Submitting request directly");
submitExchangeRateRequest((String) currencySpinner.getSelectedItem());
} else {
Log.i(TAG, "Attempting to retry without valid subscription. Directing user to purchase intent");
final Activity activity = getActivity();
if (activity instanceof SmartReceiptsActivity) {
final SmartReceiptsActivity smartReceiptsActivity = (SmartReceiptsActivity) activity;
final SubscriptionManager subscriptionManager = smartReceiptsActivity.getSubscriptionManager();
if (subscriptionManager != null) {
subscriptionManager.queryBuyIntent(Subscription.SmartReceiptsPlus, PurchaseSource.ExchangeRate);
}
}
}
}
@Override
public void onReceiptRowAutoCompleteQueryResult(String name, String price, String category) {
if (isAdded()) {
if (nameBox != null && name != null) {
nameBox.setText(name);
nameBox.setSelection(name.length());
}
if (priceBox != null && price != null && priceBox.getText().length() == 0) {
priceBox.setText(price);
}
if (categoriesSpinner != null && category != null) {
for (int i = 0; i < mCategoriesList.size(); i++) {
if (category.equals(mCategoriesList.get(i).getName())) {
categoriesSpinner.setSelection(mCategoriesList.indexOf(mCategoriesList.get(i)));
break;
}
}
}
}
}
@Override
public void onPause() {
// Notify the downstream adapters
if (mReceiptsNameAutoCompleteAdapter != null) {
mReceiptsNameAutoCompleteAdapter.onPause();
}
if (mReceiptsCommentAutoCompleteAdapter != null) {
mReceiptsCommentAutoCompleteAdapter.onPause();
}
if (mIdSubscription != null) {
mIdSubscription.unsubscribe();
mIdSubscription = null;
}
// Dismiss the soft keyboard
final InputMethodManager inputMethodManager = (InputMethodManager) getContext().getSystemService(Context.INPUT_METHOD_SERVICE);
if (inputMethodManager != null) {
if (mFocusedView != null) {
inputMethodManager.hideSoftInputFromWindow(mFocusedView.getWindowToken(), 0);
} else {
Log.w(TAG, "Unable to dismiss soft keyboard due to a null view");
}
}
exchangeRateBox.setRetryListener(null);
getPersistenceManager().getDatabase().unregisterReceiptAutoCompleteListener();
super.onPause();
}
@Override
public void onSaveInstanceState(Bundle outState) {
if (mExchangeRateContainer != null && outState != null) {
outState.putBoolean(KEY_OUT_STATE_IS_EXCHANGE_RATE_VISIBLE, mExchangeRateContainer.getVisibility() == View.VISIBLE);
}
super.onSaveInstanceState(outState);
}
@Override
public void onDestroy() {
getSmartReceiptsApplication().getTableControllerManager().getCategoriesTableController().unsubscribe(mCategoryTableEventsListener);
getSmartReceiptsApplication().getTableControllerManager().getPaymentMethodsTableController().unsubscribe(mPaymentMethodTableEventsListener);
super.onDestroy();
}
private void configureExchangeRateField(@Nullable String baseCurrencyCode) {
final String exchangeRateCurrencyCode = mTrip.getDefaultCurrencyCode();
if (exchangeRateCurrencyCode.equals(baseCurrencyCode) || baseCurrencyCode == null) {
mExchangeRateContainer.setVisibility(View.GONE);
exchangeRateBox.setText(""); // Clear out if we're hiding the box
} else {
mExchangeRateContainer.setVisibility(View.VISIBLE);
submitExchangeRateRequest(baseCurrencyCode);
}
}
private synchronized void submitExchangeRateRequest(@NonNull String baseCurrencyCode) {
exchangeRateBox.setText(""); // Clear results to avoid stale data here
if (getPersistenceManager().getSubscriptionCache().getSubscriptionWallet().hasSubscription(Subscription.SmartReceiptsPlus)) {
Log.i(TAG, "Submitting exchange rate request");
getSmartReceiptsApplication().getAnalyticsManager().record(Events.Receipts.RequestExchangeRate);
final String exchangeRateCurrencyCode = mTrip.getDefaultCurrencyCode();
exchangeRateBox.setCurrentState(NetworkRequestAwareEditText.State.Loading);
if (mLastExchangeRateFetchCallback != null) {
// Ignore any outstanding results to not confuse ourselves
mLastExchangeRateFetchCallback.ignoreResult();
}
mLastExchangeRateFetchCallback = new MemoryLeakSafeCallback<ExchangeRate, EditText>(exchangeRateBox) {
@Override
public void success(EditText editText, ExchangeRate exchangeRate, Response response) {
if (exchangeRate != null && exchangeRate.supportsExchangeRateFor(exchangeRateCurrencyCode)) {
getSmartReceiptsApplication().getAnalyticsManager().record(Events.Receipts.RequestExchangeRateSuccess);
if (TextUtils.isEmpty(editText.getText())) {
editText.setText(exchangeRate.getDecimalFormattedExchangeRate(exchangeRateCurrencyCode));
} else {
Log.w(TAG, "User already started typing... Ignoring exchange rate result");
}
exchangeRateBox.setCurrentState(NetworkRequestAwareEditText.State.Success);
} else {
Log.e(TAG, "Received a null exchange rate");
getSmartReceiptsApplication().getAnalyticsManager().record(Events.Receipts.RequestExchangeRateFailedWithNull);
exchangeRateBox.setCurrentState(NetworkRequestAwareEditText.State.Failure);
}
}
@Override
public void failure(EditText editText, RetrofitError error) {
Log.e(TAG, "" + error);
getSmartReceiptsApplication().getAnalyticsManager().record(Events.Receipts.RequestExchangeRateFailed);
exchangeRateBox.setCurrentState(NetworkRequestAwareEditText.State.Failure);
}
};
mExchangeRateServiceManager.getService().getExchangeRate(dateBox.date, getString(R.string.exchange_rate_key), baseCurrencyCode, mLastExchangeRateFetchCallback);
} else {
exchangeRateBox.setCurrentState(NetworkRequestAwareEditText.State.Ready);
Log.i(TAG, "Ignoring exchange rate request, since there is no subscription for it");
}
}
private void saveReceipt() {
final String name = nameBox.getText().toString();
final Category category = mCategoriesAdpater.getItem(categoriesSpinner.getSelectedItemPosition());
final String currency = currencySpinner.getSelectedItem().toString();
if (name.length() == 0) {
Toast.makeText(getActivity(), getFlexString(R.string.DIALOG_RECEIPTMENU_TOAST_MISSING_NAME), Toast.LENGTH_SHORT).show();
return;
}
if (dateBox.date == null) {
Toast.makeText(getActivity(), getFlexString(R.string.CALENDAR_TAB_ERROR), Toast.LENGTH_SHORT).show();
return;
} else {
mReceiptInputCache.setCachedDate((Date) dateBox.date.clone());
}
mReceiptInputCache.setCachedCategory(category);
mReceiptInputCache.setCachedCurrency(currency);
if (!mTrip.isDateInsideTripBounds(dateBox.date)) {
if (isAdded()) {
Toast.makeText(getActivity(), getFlexString(R.string.DIALOG_RECEIPTMENU_TOAST_BAD_DATE), Toast.LENGTH_LONG).show();
}
}
final boolean isNewReceipt = mReceipt == null;
final ReceiptBuilderFactory builderFactory = (isNewReceipt) ? new ReceiptBuilderFactory(-1) : new ReceiptBuilderFactory(mReceipt);
builderFactory.setName(name);
builderFactory.setTrip(mTrip);
builderFactory.setDate((Date) dateBox.date.clone());
builderFactory.setPrice(priceBox.getText().toString());
builderFactory.setTax(taxBox.getText().toString());
builderFactory.setExchangeRate(new ExchangeRateBuilderFactory().setBaseCurrency(currency).setRate(mTrip.getTripCurrency(), exchangeRateBox.getText().toString()).build());
builderFactory.setCategory(category);
builderFactory.setCurrency(currency);
builderFactory.setComment(commentBox.getText().toString());
builderFactory.setPaymentMethod((PaymentMethod) (getPersistenceManager().getPreferences().getUsesPaymentMethods() ? paymentMethodsSpinner.getSelectedItem() : null));
builderFactory.setIsReimbursable(reimbursable.isChecked());
builderFactory.setIsFullPage(fullpage.isChecked());
builderFactory.setExtraEditText1((extra_edittext_box_1 == null) ? null : extra_edittext_box_1.getText().toString());
builderFactory.setExtraEditText2((extra_edittext_box_2 == null) ? null : extra_edittext_box_2.getText().toString());
builderFactory.setExtraEditText3((extra_edittext_box_3 == null) ? null : extra_edittext_box_3.getText().toString());
if (isNewReceipt) {
builderFactory.setFile(mFile);
getSmartReceiptsApplication().getAnalyticsManager().record(Events.Receipts.PersistNewReceipt);
getSmartReceiptsApplication().getTableControllerManager().getReceiptTableController().insert(builderFactory.build(), new DatabaseOperationMetadata());
getDateManager().setDateEditTextListenerDialogHolder(null);
} else {
getSmartReceiptsApplication().getAnalyticsManager().record(Events.Receipts.PersistUpdateReceipt);
getSmartReceiptsApplication().getTableControllerManager().getReceiptTableController().update(mReceipt, builderFactory.build(), new DatabaseOperationMetadata());
getDateManager().setDateEditTextListenerDialogHolder(null);
}
mNavigationHandler.navigateToReportInfoFragment(mTrip);
}
private class SpinnerSelectionListener implements AdapterView.OnItemSelectedListener {
@Override
public void onItemSelected(AdapterView<?> parentView, View selectedItemView, int position, long id) {
final Preferences preferences = getPersistenceManager().getPreferences();
if (preferences.matchNameToCategory()) {
nameBox.setText(mCategoriesAdpater.getItem(position).getName());
}
if (preferences.matchCommentToCategory()) {
commentBox.setText(mCategoriesAdpater.getItem(position).getName());
}
}
@Override
public void onNothingSelected(AdapterView<?> arg0) {
}
}
}
| SmartReceiptsLibrary/src/main/java/co/smartreceipts/android/fragments/ReceiptCreateEditFragment.java | package co.smartreceipts.android.fragments;
import android.app.Activity;
import android.content.Context;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v7.app.ActionBar;
import android.support.v7.widget.Toolbar;
import android.text.TextUtils;
import android.text.format.DateFormat;
import android.text.format.Time;
import android.text.method.TextKeyListener;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.view.inputmethod.InputMethodManager;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.AutoCompleteTextView;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.LinearLayout;
import android.widget.Spinner;
import android.widget.Toast;
import java.io.File;
import java.sql.Date;
import java.util.Collections;
import java.util.List;
import co.smartreceipts.android.R;
import co.smartreceipts.android.activities.DefaultFragmentProvider;
import co.smartreceipts.android.activities.NavigationHandler;
import co.smartreceipts.android.activities.SmartReceiptsActivity;
import co.smartreceipts.android.adapters.TaxAutoCompleteAdapter;
import co.smartreceipts.android.analytics.events.Events;
import co.smartreceipts.android.apis.ExchangeRateServiceManager;
import co.smartreceipts.android.apis.MemoryLeakSafeCallback;
import co.smartreceipts.android.date.DateEditText;
import co.smartreceipts.android.model.Category;
import co.smartreceipts.android.model.PaymentMethod;
import co.smartreceipts.android.model.Receipt;
import co.smartreceipts.android.model.Trip;
import co.smartreceipts.android.model.factory.ExchangeRateBuilderFactory;
import co.smartreceipts.android.model.factory.ReceiptBuilderFactory;
import co.smartreceipts.android.model.gson.ExchangeRate;
import co.smartreceipts.android.persistence.DatabaseHelper;
import co.smartreceipts.android.persistence.Preferences;
import co.smartreceipts.android.persistence.database.controllers.TableEventsListener;
import co.smartreceipts.android.persistence.database.controllers.impl.StubTableEventsListener;
import co.smartreceipts.android.persistence.database.operations.DatabaseOperationMetadata;
import co.smartreceipts.android.purchases.PurchaseSource;
import co.smartreceipts.android.purchases.Subscription;
import co.smartreceipts.android.purchases.SubscriptionManager;
import co.smartreceipts.android.widget.HideSoftKeyboardOnTouchListener;
import co.smartreceipts.android.widget.NetworkRequestAwareEditText;
import co.smartreceipts.android.widget.ShowSoftKeyboardOnFocusChangeListener;
import co.smartreceipts.android.widget.UserSelectionTrackingOnItemSelectedListener;
import retrofit.RetrofitError;
import retrofit.client.Response;
import rx.android.schedulers.AndroidSchedulers;
import rx.functions.Action1;
import rx.schedulers.Schedulers;
import wb.android.autocomplete.AutoCompleteAdapter;
public class ReceiptCreateEditFragment extends WBFragment implements View.OnFocusChangeListener, NetworkRequestAwareEditText.RetryListener, DatabaseHelper.ReceiptAutoCompleteListener {
private static final String TAG = ReceiptCreateEditFragment.class.getSimpleName();
private static final String ARG_FILE = "arg_file";
private static final String KEY_OUT_STATE_IS_EXCHANGE_RATE_VISIBLE = "key_is_exchange_rate_visible";
// Metadata
private Trip mTrip;
private Receipt mReceipt;
private File mFile;
// Views
private AutoCompleteTextView nameBox;
private EditText priceBox;
private AutoCompleteTextView taxBox;
private Spinner currencySpinner;
private NetworkRequestAwareEditText exchangeRateBox;
private DateEditText dateBox;
private AutoCompleteTextView commentBox;
private Spinner categoriesSpinner;
private CheckBox reimbursable;
private CheckBox fullpage;
private Spinner paymentMethodsSpinner;
private EditText extra_edittext_box_1;
private EditText extra_edittext_box_2;
private EditText extra_edittext_box_3;
private ViewGroup mPaymentMethodsContainer;
private ViewGroup mExchangeRateContainer;
private Toolbar mToolbar;
private View mFocusedView;
// Rx
private rx.Subscription mIdSubscription;
private TableEventsListener<Category> mCategoryTableEventsListener;
private TableEventsListener<PaymentMethod> mPaymentMethodTableEventsListener;
// Misc
private MemoryLeakSafeCallback<ExchangeRate, EditText> mLastExchangeRateFetchCallback;
private NavigationHandler mNavigationHandler;
private ExchangeRateServiceManager mExchangeRateServiceManager;
private ReceiptInputCache mReceiptInputCache;
private AutoCompleteAdapter mReceiptsNameAutoCompleteAdapter, mReceiptsCommentAutoCompleteAdapter;
private ArrayAdapter<CharSequence> mCurrenciesAdapter;
private List<Category> mCategoriesList;
private ArrayAdapter<Category> mCategoriesAdpater;
private ArrayAdapter<PaymentMethod> mPaymentMethodsAdapter;
/**
* Creates a new instance of this fragment for a new receipt
*
* @param trip - the parent trip of this receipt
* @param file - the file associated with this receipt or null if we do not have one
* @return the new instance of this fragment
*/
public static ReceiptCreateEditFragment newInstance(@NonNull Trip trip, @Nullable File file) {
return newInstance(trip, null, file);
}
/**
* Creates a new instance of this fragment to edit an existing receipt
*
* @param trip - the parent trip of this receipt
* @param receiptToEdit - the receipt to edit
* @return the new instance of this fragment
*/
public static ReceiptCreateEditFragment newInstance(@NonNull Trip trip, @NonNull Receipt receiptToEdit) {
return newInstance(trip, receiptToEdit, null);
}
private static ReceiptCreateEditFragment newInstance(@NonNull Trip trip, @Nullable Receipt receiptToEdit, @Nullable File file) {
final ReceiptCreateEditFragment fragment = new ReceiptCreateEditFragment();
final Bundle args = new Bundle();
args.putParcelable(Trip.PARCEL_KEY, trip);
args.putParcelable(Receipt.PARCEL_KEY, receiptToEdit);
args.putSerializable(ARG_FILE, file);
fragment.setArguments(args);
return fragment;
}
@Override
public void onCreate(Bundle savedInstanceState) {
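// Unpack the fragment arguments and build the caches, handlers, and adapters that do not depend on the view hierarchy.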
super.onCreate(savedInstanceState);
mTrip = getArguments().getParcelable(Trip.PARCEL_KEY);
mReceipt = getArguments().getParcelable(Receipt.PARCEL_KEY);
mFile = (File) getArguments().getSerializable(ARG_FILE);
mReceiptInputCache = new ReceiptInputCache(getFragmentManager());
mNavigationHandler = new NavigationHandler(getActivity(), new DefaultFragmentProvider());
mExchangeRateServiceManager = new ExchangeRateServiceManager(getFragmentManager());
mCurrenciesAdapter = new ArrayAdapter<>(getActivity(), android.R.layout.simple_spinner_item, getPersistenceManager().getDatabase().getCurrenciesList());
mCategoriesList = Collections.emptyList();
mCategoriesAdpater = new ArrayAdapter<>(getActivity(), android.R.layout.simple_spinner_item, Collections.<Category>emptyList());
mPaymentMethodsAdapter = new ArrayAdapter<>(getActivity(), android.R.layout.simple_spinner_item, Collections.<PaymentMethod>emptyList());
setHasOptionsMenu(true);
}
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
return inflater.inflate(R.layout.update_receipt, container, false);
}
@Override
public void onViewCreated(View rootView, @Nullable Bundle savedInstanceState) {
super.onViewCreated(rootView, savedInstanceState);
this.nameBox = (AutoCompleteTextView) getFlex().getSubView(getActivity(), rootView, R.id.DIALOG_RECEIPTMENU_NAME);
this.priceBox = (EditText) getFlex().getSubView(getActivity(), rootView, R.id.DIALOG_RECEIPTMENU_PRICE);
this.taxBox = (AutoCompleteTextView) getFlex().getSubView(getActivity(), rootView, R.id.DIALOG_RECEIPTMENU_TAX);
this.currencySpinner = (Spinner) getFlex().getSubView(getActivity(), rootView, R.id.DIALOG_RECEIPTMENU_CURRENCY);
this.exchangeRateBox = (NetworkRequestAwareEditText) getFlex().getSubView(getActivity(), rootView, R.id.DIALOG_RECEIPTMENU_EXCHANGE_RATE);
mExchangeRateContainer = (ViewGroup) getFlex().getSubView(getActivity(), rootView, R.id.exchange_rate_container);
this.dateBox = (DateEditText) getFlex().getSubView(getActivity(), rootView, R.id.DIALOG_RECEIPTMENU_DATE);
this.commentBox = (AutoCompleteTextView) getFlex().getSubView(getActivity(), rootView, R.id.DIALOG_RECEIPTMENU_COMMENT);
this.categoriesSpinner = (Spinner) getFlex().getSubView(getActivity(), rootView, R.id.DIALOG_RECEIPTMENU_CATEGORY);
this.reimbursable = (CheckBox) getFlex().getSubView(getActivity(), rootView, R.id.DIALOG_RECEIPTMENU_EXPENSABLE);
this.fullpage = (CheckBox) getFlex().getSubView(getActivity(), rootView, R.id.DIALOG_RECEIPTMENU_FULLPAGE);
this.paymentMethodsSpinner = (Spinner) getFlex().getSubView(getActivity(), rootView, R.id.dialog_receiptmenu_payment_methods_spinner);
mPaymentMethodsContainer = (ViewGroup) getFlex().getSubView(getActivity(), rootView, R.id.payment_methods_container);
// Extras
final LinearLayout extras = (LinearLayout) getFlex().getSubView(getActivity(), rootView, R.id.DIALOG_RECEIPTMENU_EXTRAS);
this.extra_edittext_box_1 = (EditText) extras.findViewWithTag(getFlexString(R.string.RECEIPTMENU_TAG_EXTRA_EDITTEXT_1));
this.extra_edittext_box_2 = (EditText) extras.findViewWithTag(getFlexString(R.string.RECEIPTMENU_TAG_EXTRA_EDITTEXT_2));
this.extra_edittext_box_3 = (EditText) extras.findViewWithTag(getFlexString(R.string.RECEIPTMENU_TAG_EXTRA_EDITTEXT_3));
// Toolbar stuff
mToolbar = (Toolbar) rootView.findViewById(R.id.toolbar);
if (mNavigationHandler.isDualPane()) {
mToolbar.setVisibility(View.GONE);
} else {
setSupportActionBar(mToolbar);
}
// Set each focus listener, so we can track the focus view across resume -> pauses
this.nameBox.setOnFocusChangeListener(this);
this.priceBox.setOnFocusChangeListener(this);
this.taxBox.setOnFocusChangeListener(this);
this.currencySpinner.setOnFocusChangeListener(this);
this.dateBox.setOnFocusChangeListener(this);
this.commentBox.setOnFocusChangeListener(this);
// Custom view properties
exchangeRateBox.setFailedHint(R.string.DIALOG_RECEIPTMENU_HINT_EXCHANGE_RATE_FAILED);
// Set click listeners
dateBox.setOnTouchListener(new HideSoftKeyboardOnTouchListener());
categoriesSpinner.setOnTouchListener(new HideSoftKeyboardOnTouchListener());
currencySpinner.setOnTouchListener(new HideSoftKeyboardOnTouchListener());
// Show default dictionary with auto-complete
nameBox.setKeyListener(TextKeyListener.getInstance(true, TextKeyListener.Capitalize.SENTENCES));
// Set-up tax layers
if (getPersistenceManager().getPreferences().includeTaxField()) {
priceBox.setHint(getFlexString(R.string.DIALOG_RECEIPTMENU_HINT_PRICE_SHORT));
taxBox.setVisibility(View.VISIBLE);
}
// Configure dropdown defaults for currencies
mCurrenciesAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
currencySpinner.setAdapter(mCurrenciesAdapter);
// And the exchange rate processing for our currencies
final boolean exchangeRateIsVisible = savedInstanceState != null && savedInstanceState.getBoolean(KEY_OUT_STATE_IS_EXCHANGE_RATE_VISIBLE);
if (exchangeRateIsVisible) {
// Note: the restoration of selected spinner items (in the currency spinner) is delayed so we use this state tracker to restore immediately
mExchangeRateContainer.setVisibility(View.VISIBLE);
}
currencySpinner.setOnItemSelectedListener(new UserSelectionTrackingOnItemSelectedListener() {
@Override
public void onUserSelectedNewItem(AdapterView<?> parent, View view, int position, long id, int previousPosition) {
// Then determine if we should show/hide the box
final String baseCurrencyCode = mCurrenciesAdapter.getItem(position).toString();
final String exchangeRateCurrencyCode = mTrip.getDefaultCurrencyCode();
if (baseCurrencyCode.equals(exchangeRateCurrencyCode)) {
mExchangeRateContainer.setVisibility(View.GONE);
exchangeRateBox.setText(""); // Clear out if we're hiding the box
} else {
mExchangeRateContainer.setVisibility(View.VISIBLE);
submitExchangeRateRequest(baseCurrencyCode);
}
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
// Intentional no-op
}
});
// Outline date defaults
dateBox.setFocusableInTouchMode(false);
dateBox.setOnClickListener(getDateManager().getDateEditTextListener());
// Lastly, preset adapters for "new" receipts
final boolean isNewReceipt = mReceipt == null;
if (isNewReceipt) {
if (getPersistenceManager().getPreferences().includeTaxField()) {
taxBox.setAdapter(new TaxAutoCompleteAdapter(getActivity(), priceBox, taxBox, getPersistenceManager().getPreferences(), getPersistenceManager().getPreferences().getDefaultTaxPercentage()));
}
}
}
@Override
public void onActivityCreated(@Nullable Bundle savedInstanceState) {
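// Populate the form: sensible defaults for a brand new receipt, or the stored values when editing an existing one.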
super.onActivityCreated(savedInstanceState);
// Configure things if it's not a restored fragment
if (savedInstanceState == null) {
final boolean isNewReceipt = mReceipt == null;
if (isNewReceipt) {
final Time now = new Time();
now.setToNow();
if (mReceiptInputCache.getCachedDate() == null) {
if (getPersistenceManager().getPreferences().defaultToFirstReportDate()) {
dateBox.date = mTrip.getStartDate();
} else {
dateBox.date = new Date(now.toMillis(false));
}
} else {
dateBox.date = mReceiptInputCache.getCachedDate();
}
dateBox.setText(DateFormat.getDateFormat(getActivity()).format(dateBox.date));
final Preferences preferences = getPersistenceManager().getPreferences();
reimbursable.setChecked(preferences.doReceiptsDefaultAsReimbursable());
// When the name is matched to the category it will be auto-filled, so start focus on the price box instead.
if (preferences.matchNameToCategory()) {
if (mFocusedView == null) {
mFocusedView = priceBox;
}
}
int idx = mCurrenciesAdapter.getPosition((mTrip != null) ? mTrip.getDefaultCurrencyCode() : preferences.getDefaultCurreny());
int cachedIdx = (mReceiptInputCache.getCachedCurrency() != null) ? mCurrenciesAdapter.getPosition(mReceiptInputCache.getCachedCurrency()) : -1;
idx = (cachedIdx > 0) ? cachedIdx : idx;
if (idx > 0) {
currencySpinner.setSelection(idx);
}
fullpage.setChecked(preferences.shouldDefaultToFullPage());
} else {
nameBox.setText(mReceipt.getName());
priceBox.setText(mReceipt.getPrice().getDecimalFormattedPrice());
dateBox.setText(mReceipt.getFormattedDate(getActivity(), getPersistenceManager().getPreferences().getDateSeparator()));
dateBox.date = mReceipt.getDate();
commentBox.setText(mReceipt.getComment());
taxBox.setText(mReceipt.getTax().getDecimalFormattedPrice());
final ExchangeRate exchangeRate = mReceipt.getPrice().getExchangeRate();
if (exchangeRate.supportsExchangeRateFor(mTrip.getDefaultCurrencyCode())) {
exchangeRateBox.setText(exchangeRate.getDecimalFormattedExchangeRate(mTrip.getDefaultCurrencyCode()));
}
int idx = mCurrenciesAdapter.getPosition(mReceipt.getPrice().getCurrencyCode());
if (idx > 0) {
currencySpinner.setSelection(idx);
}
if (mReceipt.getPrice().getCurrency().equals(mTrip.getPrice().getCurrency())) {
mExchangeRateContainer.setVisibility(View.GONE);
} else {
mExchangeRateContainer.setVisibility(View.VISIBLE);
}
reimbursable.setChecked(mReceipt.isReimbursable());
fullpage.setChecked(mReceipt.isFullPage());
if (extra_edittext_box_1 != null && mReceipt.hasExtraEditText1()) {
extra_edittext_box_1.setText(mReceipt.getExtraEditText1());
}
if (extra_edittext_box_2 != null && mReceipt.hasExtraEditText2()) {
extra_edittext_box_2.setText(mReceipt.getExtraEditText2());
}
if (extra_edittext_box_3 != null && mReceipt.hasExtraEditText3()) {
extra_edittext_box_3.setText(mReceipt.getExtraEditText3());
}
}
// Focused View
if (mFocusedView == null) {
mFocusedView = nameBox;
}
}
// Configure items that require callbacks
mCategoryTableEventsListener = new StubTableEventsListener<Category>() {
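// Once the categories load, rebuild the spinner and pick a default selection (predicted by time of day, cached, or the receipt's own category).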
@Override
public void onGetSuccess(@NonNull List<Category> list) {
if (isAdded()) {
mCategoriesList = list;
mCategoriesAdpater = new ArrayAdapter<>(getActivity(), android.R.layout.simple_spinner_item, list);
mCategoriesAdpater.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
categoriesSpinner.setAdapter(mCategoriesAdpater);
if (mReceipt == null) {
final Preferences preferences = getPersistenceManager().getPreferences();
if (preferences.matchCommentToCategory() || preferences.matchNameToCategory()) {
categoriesSpinner.setOnItemSelectedListener(new SpinnerSelectionListener());
}
if (preferences.predictCategories()) { // Predict Breakfast, Lunch, Dinner by the hour
if (mReceiptInputCache.getCachedCategory() == null) {
final Time now = new Time();
now.setToNow();
String nameToIndex = null;
if (now.hour >= 4 && now.hour < 11) { // Breakfast hours
nameToIndex = getString(R.string.category_breakfast);
} else if (now.hour >= 11 && now.hour < 16) { // Lunch hours
nameToIndex = getString(R.string.category_lunch);
} else if (now.hour >= 16 && now.hour < 23) { // Dinner hours
nameToIndex = getString(R.string.category_dinner);
}
if (nameToIndex != null) {
for (int i = 0; i < mCategoriesAdpater.getCount(); i++) {
if (nameToIndex.equals(mCategoriesAdpater.getItem(i).getName())) {
categoriesSpinner.setSelection(i);
break; // Exit loop now
}
}
}
} else {
int idx = mCategoriesAdpater.getPosition(mReceiptInputCache.getCachedCategory());
if (idx > 0) {
categoriesSpinner.setSelection(idx);
}
}
}
} else {
categoriesSpinner.setSelection(mCategoriesAdpater.getPosition(mReceipt.getCategory()));
}
}
}
};
mPaymentMethodTableEventsListener = new StubTableEventsListener<PaymentMethod>() {
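// Payment methods are only shown when the corresponding preference is enabled; preselect the receipt's method when editing.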
@Override
public void onGetSuccess(@NonNull List<PaymentMethod> list) {
if (isAdded()) {
mPaymentMethodsAdapter = new ArrayAdapter<>(getActivity(), android.R.layout.simple_spinner_item, list);
mPaymentMethodsAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
paymentMethodsSpinner.setAdapter(mPaymentMethodsAdapter);
if (getPersistenceManager().getPreferences().getUsesPaymentMethods()) {
mPaymentMethodsContainer.setVisibility(View.VISIBLE);
if (mReceipt != null) {
final PaymentMethod oldPaymentMethod = mReceipt.getPaymentMethod();
if (oldPaymentMethod != null) {
final int paymentIdx = mPaymentMethodsAdapter.getPosition(oldPaymentMethod);
if (paymentIdx > 0) {
paymentMethodsSpinner.setSelection(paymentIdx);
}
}
}
}
}
}
};
getSmartReceiptsApplication().getTableControllerManager().getCategoriesTableController().subscribe(mCategoryTableEventsListener);
getSmartReceiptsApplication().getTableControllerManager().getPaymentMethodsTableController().subscribe(mPaymentMethodTableEventsListener);
getSmartReceiptsApplication().getTableControllerManager().getCategoriesTableController().get();
getSmartReceiptsApplication().getTableControllerManager().getPaymentMethodsTableController().get();
}
@Override
public void onResume() {
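// Refresh the toolbar title, the receipt-id subscription, the auto-complete adapters, and the listeners when returning to the foreground.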
super.onResume();
final boolean isNewReceipt = mReceipt == null;
final String title;
if (isNewReceipt) {
title = getFlexString(R.string.DIALOG_RECEIPTMENU_TITLE_NEW);
} else {
if (getPersistenceManager().getPreferences().isShowReceiptID()) {
title = String.format(getFlexString(R.string.DIALOG_RECEIPTMENU_TITLE_EDIT_ID), mReceipt.getId());
} else {
title = getFlexString(R.string.DIALOG_RECEIPTMENU_TITLE_EDIT);
}
}
final ActionBar actionBar = getSupportActionBar();
if (actionBar != null) {
actionBar.setHomeButtonEnabled(true);
actionBar.setDisplayHomeAsUpEnabled(true);
actionBar.setHomeAsUpIndicator(R.drawable.ic_action_cancel);
actionBar.setTitle(title);
actionBar.setSubtitle("");
}
if (isNewReceipt && getPersistenceManager().getPreferences().isShowReceiptID()) {
mIdSubscription = getPersistenceManager().getDatabase().getNextReceiptAutoIncremenetIdHelper()
.subscribeOn(Schedulers.io())
.observeOn(AndroidSchedulers.mainThread())
.subscribe(new Action1<Integer>() {
@Override
public void call(Integer integer) {
if (isResumed()) {
final ActionBar actionBar = getSupportActionBar();
if (actionBar != null) {
final String titleWithId = String.format(getFlexString(R.string.DIALOG_RECEIPTMENU_TITLE_NEW_ID), integer);
actionBar.setTitle(titleWithId);
}
}
}
});
}
if (isNewReceipt) {
if (getPersistenceManager().getPreferences().enableAutoCompleteSuggestions()) {
final DatabaseHelper db = getPersistenceManager().getDatabase();
if (mReceiptsNameAutoCompleteAdapter == null) {
mReceiptsNameAutoCompleteAdapter = AutoCompleteAdapter.getInstance(getActivity(), DatabaseHelper.TAG_RECEIPTS_NAME, db, db);
} else {
mReceiptsNameAutoCompleteAdapter.reset();
}
if (mReceiptsCommentAutoCompleteAdapter == null) {
mReceiptsCommentAutoCompleteAdapter = AutoCompleteAdapter.getInstance(getActivity(), DatabaseHelper.TAG_RECEIPTS_COMMENT, db);
} else {
mReceiptsCommentAutoCompleteAdapter.reset();
}
nameBox.setAdapter(mReceiptsNameAutoCompleteAdapter);
commentBox.setAdapter(mReceiptsCommentAutoCompleteAdapter);
}
}
if (mFocusedView != null) {
mFocusedView.requestFocus(); // Make sure we're focused on the right view
}
exchangeRateBox.setRetryListener(this);
getPersistenceManager().getDatabase().registerReceiptAutoCompleteListener(this);
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
inflater.inflate(R.menu.menu_save, menu);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
if (item.getItemId() == android.R.id.home) {
mNavigationHandler.navigateToReportInfoFragment(mTrip);
return true;
}
if (item.getItemId() == R.id.action_save) {
saveReceipt();
return true;
}
return super.onOptionsItemSelected(item);
}
@Override
public void onFocusChange(View v, boolean hasFocus) {
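// Remember which view currently has focus so it can be restored in onResume().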
mFocusedView = hasFocus ? v : null;
if (mReceipt == null) {
// Only launch if we have focus and it's a new receipt
new ShowSoftKeyboardOnFocusChangeListener().onFocusChange(v, hasFocus);
}
}
@Override
public void onUserRetry() {
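// Retry handler for the exchange rate field: resubmit the request for Plus subscribers, otherwise launch the purchase flow.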
if (getPersistenceManager().getSubscriptionCache().getSubscriptionWallet().hasSubscription(Subscription.SmartReceiptsPlus)) {
Log.i(TAG, "Attempting to retry with valid subscription. Submitting request directly");
submitExchangeRateRequest((String) currencySpinner.getSelectedItem());
} else {
Log.i(TAG, "Attempting to retry without valid subscription. Directing user to purchase intent");
final Activity activity = getActivity();
if (activity instanceof SmartReceiptsActivity) {
final SmartReceiptsActivity smartReceiptsActivity = (SmartReceiptsActivity) activity;
final SubscriptionManager subscriptionManager = smartReceiptsActivity.getSubscriptionManager();
if (subscriptionManager != null) {
subscriptionManager.queryBuyIntent(Subscription.SmartReceiptsPlus, PurchaseSource.ExchangeRate);
}
}
}
}
@Override
public void onReceiptRowAutoCompleteQueryResult(String name, String price, String category) {
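// Copy the name, the price (if still empty), and the category from a previously entered receipt picked in the auto-complete dropdown.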
if (isAdded()) {
if (nameBox != null && name != null) {
nameBox.setText(name);
nameBox.setSelection(name.length());
}
if (priceBox != null && price != null && priceBox.getText().length() == 0) {
priceBox.setText(price);
}
if (categoriesSpinner != null && category != null) {
for (int i = 0; i < mCategoriesList.size(); i++) {
if (category.equals(mCategoriesList.get(i).getName())) {
categoriesSpinner.setSelection(mCategoriesList.indexOf(mCategoriesList.get(i)));
break;
}
}
}
}
}
@Override
public void onPause() {
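// Release adapters, the id subscription, and listeners, and hide the soft keyboard before going to the background.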
// Notify the downstream adapters
if (mReceiptsNameAutoCompleteAdapter != null) {
mReceiptsNameAutoCompleteAdapter.onPause();
}
if (mReceiptsCommentAutoCompleteAdapter != null) {
mReceiptsCommentAutoCompleteAdapter.onPause();
}
if (mIdSubscription != null) {
mIdSubscription.unsubscribe();
mIdSubscription = null;
}
// Dismiss the soft keyboard
final InputMethodManager inputMethodManager = (InputMethodManager) getContext().getSystemService(Context.INPUT_METHOD_SERVICE);
if (inputMethodManager != null) {
if (mFocusedView != null) {
inputMethodManager.hideSoftInputFromWindow(mFocusedView.getWindowToken(), 0);
} else {
Log.w(TAG, "Unable to dismiss soft keyboard due to a null view");
}
}
exchangeRateBox.setRetryListener(null);
getPersistenceManager().getDatabase().unregisterReceiptAutoCompleteListener();
super.onPause();
}
@Override
public void onSaveInstanceState(Bundle outState) {
if (mExchangeRateContainer != null && outState != null) {
outState.putBoolean(KEY_OUT_STATE_IS_EXCHANGE_RATE_VISIBLE, mExchangeRateContainer.getVisibility() == View.VISIBLE);
}
super.onSaveInstanceState(outState);
}
@Override
public void onDestroy() {
getSmartReceiptsApplication().getTableControllerManager().getCategoriesTableController().unsubscribe(mCategoryTableEventsListener);
getSmartReceiptsApplication().getTableControllerManager().getPaymentMethodsTableController().unsubscribe(mPaymentMethodTableEventsListener);
super.onDestroy();
}
private synchronized void submitExchangeRateRequest(@NonNull String baseCurrencyCode) {
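// Fetch the exchange rate from the selected receipt currency into the trip's currency; only runs for Plus subscribers.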
exchangeRateBox.setText(""); // Clear results to avoid stale data here
if (getPersistenceManager().getSubscriptionCache().getSubscriptionWallet().hasSubscription(Subscription.SmartReceiptsPlus)) {
Log.i(TAG, "Submitting exchange rate request");
getSmartReceiptsApplication().getAnalyticsManager().record(Events.Receipts.RequestExchangeRate);
final String exchangeRateCurrencyCode = mTrip.getDefaultCurrencyCode();
exchangeRateBox.setCurrentState(NetworkRequestAwareEditText.State.Loading);
if (mLastExchangeRateFetchCallback != null) {
// Ignore any outstanding results to not confuse ourselves
mLastExchangeRateFetchCallback.ignoreResult();
}
mLastExchangeRateFetchCallback = new MemoryLeakSafeCallback<ExchangeRate, EditText>(exchangeRateBox) {
@Override
public void success(EditText editText, ExchangeRate exchangeRate, Response response) {
if (exchangeRate != null && exchangeRate.supportsExchangeRateFor(exchangeRateCurrencyCode)) {
getSmartReceiptsApplication().getAnalyticsManager().record(Events.Receipts.RequestExchangeRateSuccess);
if (TextUtils.isEmpty(editText.getText())) {
editText.setText(exchangeRate.getDecimalFormattedExchangeRate(exchangeRateCurrencyCode));
} else {
Log.w(TAG, "User already started typing... Ignoring exchange rate result");
}
exchangeRateBox.setCurrentState(NetworkRequestAwareEditText.State.Success);
} else {
Log.e(TAG, "Received a null exchange rate");
getSmartReceiptsApplication().getAnalyticsManager().record(Events.Receipts.RequestExchangeRateFailedWithNull);
exchangeRateBox.setCurrentState(NetworkRequestAwareEditText.State.Failure);
}
}
@Override
public void failure(EditText editText, RetrofitError error) {
Log.e(TAG, "" + error);
getSmartReceiptsApplication().getAnalyticsManager().record(Events.Receipts.RequestExchangeRateFailed);
exchangeRateBox.setCurrentState(NetworkRequestAwareEditText.State.Failure);
}
};
mExchangeRateServiceManager.getService().getExchangeRate(dateBox.date, getString(R.string.exchange_rate_key), baseCurrencyCode, mLastExchangeRateFetchCallback);
} else {
exchangeRateBox.setCurrentState(NetworkRequestAwareEditText.State.Ready);
Log.i(TAG, "Ignoring exchange rate request, since there is no subscription for it");
}
}
private void saveReceipt() {
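// Validate input, cache the date/category/currency for the next entry, build the receipt, then insert or update it and navigate back.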
final String name = nameBox.getText().toString();
final Category category = mCategoriesAdpater.getItem(categoriesSpinner.getSelectedItemPosition());
final String currency = currencySpinner.getSelectedItem().toString();
if (name.length() == 0) {
Toast.makeText(getActivity(), getFlexString(R.string.DIALOG_RECEIPTMENU_TOAST_MISSING_NAME), Toast.LENGTH_SHORT).show();
return;
}
if (dateBox.date == null) {
Toast.makeText(getActivity(), getFlexString(R.string.CALENDAR_TAB_ERROR), Toast.LENGTH_SHORT).show();
return;
} else {
mReceiptInputCache.setCachedDate((Date) dateBox.date.clone());
}
mReceiptInputCache.setCachedCategory(category);
mReceiptInputCache.setCachedCurrency(currency);
if (!mTrip.isDateInsideTripBounds(dateBox.date)) {
if (isAdded()) {
Toast.makeText(getActivity(), getFlexString(R.string.DIALOG_RECEIPTMENU_TOAST_BAD_DATE), Toast.LENGTH_LONG).show();
}
}
final boolean isNewReceipt = mReceipt == null;
final ReceiptBuilderFactory builderFactory = (isNewReceipt) ? new ReceiptBuilderFactory(-1) : new ReceiptBuilderFactory(mReceipt);
builderFactory.setName(name);
builderFactory.setTrip(mTrip);
builderFactory.setDate((Date) dateBox.date.clone());
builderFactory.setPrice(priceBox.getText().toString());
builderFactory.setTax(taxBox.getText().toString());
builderFactory.setExchangeRate(new ExchangeRateBuilderFactory().setBaseCurrency(currency).setRate(mTrip.getTripCurrency(), exchangeRateBox.getText().toString()).build());
builderFactory.setCategory(category);
builderFactory.setCurrency(currency);
builderFactory.setComment(commentBox.getText().toString());
builderFactory.setPaymentMethod((PaymentMethod) (getPersistenceManager().getPreferences().getUsesPaymentMethods() ? paymentMethodsSpinner.getSelectedItem() : null));
builderFactory.setIsReimbursable(reimbursable.isChecked());
builderFactory.setIsFullPage(fullpage.isChecked());
builderFactory.setExtraEditText1((extra_edittext_box_1 == null) ? null : extra_edittext_box_1.getText().toString());
builderFactory.setExtraEditText2((extra_edittext_box_2 == null) ? null : extra_edittext_box_2.getText().toString());
builderFactory.setExtraEditText3((extra_edittext_box_3 == null) ? null : extra_edittext_box_3.getText().toString());
if (isNewReceipt) {
builderFactory.setFile(mFile);
getSmartReceiptsApplication().getAnalyticsManager().record(Events.Receipts.PersistNewReceipt);
getSmartReceiptsApplication().getTableControllerManager().getReceiptTableController().insert(builderFactory.build(), new DatabaseOperationMetadata());
getDateManager().setDateEditTextListenerDialogHolder(null);
} else {
getSmartReceiptsApplication().getAnalyticsManager().record(Events.Receipts.PersistUpdateReceipt);
getSmartReceiptsApplication().getTableControllerManager().getReceiptTableController().update(mReceipt, builderFactory.build(), new DatabaseOperationMetadata());
getDateManager().setDateEditTextListenerDialogHolder(null);
}
mNavigationHandler.navigateToReportInfoFragment(mTrip);
}
private class SpinnerSelectionListener implements AdapterView.OnItemSelectedListener {
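// Mirrors the selected category's name into the name and/or comment boxes when the matching preferences are enabled.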
@Override
public void onItemSelected(AdapterView<?> parentView, View selectedItemView, int position, long id) {
final Preferences preferences = getPersistenceManager().getPreferences();
if (preferences.matchNameToCategory()) {
nameBox.setText(mCategoriesAdpater.getItem(position).getName());
}
if (preferences.matchCommentToCategory()) {
commentBox.setText(mCategoriesAdpater.getItem(position).getName());
}
}
@Override
public void onNothingSelected(AdapterView<?> arg0) {
}
}
}
| Fixed a bug in which the exchange rates failed to apply if cached
| SmartReceiptsLibrary/src/main/java/co/smartreceipts/android/fragments/ReceiptCreateEditFragment.java | Fixed a bug in which the exchange rates failed to apply if cached | <ide><path>martReceiptsLibrary/src/main/java/co/smartreceipts/android/fragments/ReceiptCreateEditFragment.java
<ide> public void onUserSelectedNewItem(AdapterView<?> parent, View view, int position, long id, int previousPosition) {
<ide> // Then determine if we should show/hide the box
<ide> final String baseCurrencyCode = mCurrenciesAdapter.getItem(position).toString();
<del> final String exchangeRateCurrencyCode = mTrip.getDefaultCurrencyCode();
<del> if (baseCurrencyCode.equals(exchangeRateCurrencyCode)) {
<del> mExchangeRateContainer.setVisibility(View.GONE);
<del> exchangeRateBox.setText(""); // Clear out if we're hiding the box
<del> } else {
<del> mExchangeRateContainer.setVisibility(View.VISIBLE);
<del> submitExchangeRateRequest(baseCurrencyCode);
<del> }
<add> configureExchangeRateField(baseCurrencyCode);
<ide> }
<ide>
<ide> @Override
<ide>
<ide> int idx = mCurrenciesAdapter.getPosition((mTrip != null) ? mTrip.getDefaultCurrencyCode() : preferences.getDefaultCurreny());
<ide> int cachedIdx = (mReceiptInputCache.getCachedCurrency() != null) ? mCurrenciesAdapter.getPosition(mReceiptInputCache.getCachedCurrency()) : -1;
<del> idx = (cachedIdx > 0) ? cachedIdx : idx;
<del> if (idx > 0) {
<add> idx = (cachedIdx >= 0) ? cachedIdx : idx;
<add> if (idx >= 0) {
<ide> currencySpinner.setSelection(idx);
<add> }
<add> if (!mTrip.getDefaultCurrencyCode().equals(mReceiptInputCache.getCachedCurrency())) {
<add> configureExchangeRateField(mReceiptInputCache.getCachedCurrency());
<ide> }
<ide> fullpage.setChecked(preferences.shouldDefaultToFullPage());
<ide>
<ide> super.onDestroy();
<ide> }
<ide>
<add> private void configureExchangeRateField(@Nullable String baseCurrencyCode) {
<add> final String exchangeRateCurrencyCode = mTrip.getDefaultCurrencyCode();
<add> if (exchangeRateCurrencyCode.equals(baseCurrencyCode) || baseCurrencyCode == null) {
<add> mExchangeRateContainer.setVisibility(View.GONE);
<add> exchangeRateBox.setText(""); // Clear out if we're hiding the box
<add> } else {
<add> mExchangeRateContainer.setVisibility(View.VISIBLE);
<add> submitExchangeRateRequest(baseCurrencyCode);
<add> }
<add> }
<add>
<ide> private synchronized void submitExchangeRateRequest(@NonNull String baseCurrencyCode) {
<ide> exchangeRateBox.setText(""); // Clear results to avoid stale data here
<ide> if (getPersistenceManager().getSubscriptionCache().getSubscriptionWallet().hasSubscription(Subscription.SmartReceiptsPlus)) { |
|
Java | apache-2.0 | a10ccb7550694a8c7a2d0372e838d2b5cc903f69 | 0 | chamikaramj/beam,chamikaramj/beam,robertwb/incubator-beam,apache/beam,lukecwik/incubator-beam,chamikaramj/beam,chamikaramj/beam,chamikaramj/beam,lukecwik/incubator-beam,robertwb/incubator-beam,robertwb/incubator-beam,robertwb/incubator-beam,apache/beam,chamikaramj/beam,apache/beam,chamikaramj/beam,apache/beam,lukecwik/incubator-beam,chamikaramj/beam,apache/beam,robertwb/incubator-beam,lukecwik/incubator-beam,robertwb/incubator-beam,apache/beam,lukecwik/incubator-beam,robertwb/incubator-beam,lukecwik/incubator-beam,apache/beam,lukecwik/incubator-beam,chamikaramj/beam,lukecwik/incubator-beam,lukecwik/incubator-beam,robertwb/incubator-beam,lukecwik/incubator-beam,apache/beam,robertwb/incubator-beam,apache/beam,apache/beam,robertwb/incubator-beam,chamikaramj/beam,apache/beam | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.examples.snippets.transforms.io.gcp.bigquery;
import static org.junit.Assert.assertEquals;
import com.google.api.services.bigquery.model.TableRow;
import com.google.api.services.bigquery.model.TableSchema;
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQuery.DatasetDeleteOption;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.DatasetId;
import com.google.cloud.bigquery.DatasetInfo;
import com.google.cloud.bigquery.FieldValue;
import com.google.cloud.bigquery.FieldValueList;
import com.google.cloud.bigquery.QueryJobConfiguration;
import java.math.BigDecimal;
import java.security.SecureRandom;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.Arrays;
import java.util.Base64;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import org.apache.beam.examples.snippets.transforms.io.gcp.bigquery.BigQueryMyData.MyData;
import org.apache.beam.examples.snippets.transforms.io.gcp.bigquery.BigQueryMyData.MyStruct;
import org.apache.beam.sdk.extensions.gcp.options.GcpOptions;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.FlatMapElements;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.TypeDescriptors;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Integration tests for BigQuery samples.
*
* <p>To run locally:
*
* <pre>{@code
* ./gradlew integrationTest -p examples/java/ --info \
* --tests org.apache.beam.examples.snippets.transforms.io.gcp.bigquery.BigQuerySamplesIT \
* -DintegrationTestPipelineOptions='["--tempLocation=gs://YOUR-BUCKET/temp"]' \
* -DintegrationTestRunner=direct
* }</pre>
*/
@RunWith(JUnit4.class)
public class BigQuerySamplesIT {
private static final String PROJECT =
TestPipeline.testingPipelineOptions().as(GcpOptions.class).getProject();
private static final BigQuery BIGQUERY =
BigQueryOptions.newBuilder().setProjectId(PROJECT).build().getService();
private static final String DATASET =
"beam_bigquery_samples_" + System.currentTimeMillis() + "_" + new SecureRandom().nextInt(32);
@Rule public final transient TestPipeline writePipeline = TestPipeline.create();
@Rule public final transient TestPipeline readTablePipeline = TestPipeline.create();
@Rule public final transient TestPipeline readQueryPipeline = TestPipeline.create();
@Rule public final transient TestPipeline readBQStorageAPIPipeline = TestPipeline.create();
@Rule public final TestName testName = new TestName();
@BeforeClass
public static void beforeAll() throws Exception {
BIGQUERY.create(DatasetInfo.newBuilder(PROJECT, DATASET).build());
}
@AfterClass
public static void afterAll() {
BIGQUERY.delete(DatasetId.of(PROJECT, DATASET), DatasetDeleteOption.deleteContents());
}
@Test
public void testTableIO() throws Exception {
String table = testName.getMethodName();
// ===--- Test 1: createTableRow + writeToTable ---===\\
// The rest of the tests depend on this since this is the one that writes
// the contents into the BigQuery table, which the other tests then read.
TableSchema schema = BigQuerySchemaCreate.createSchema();
PCollection<TableRow> rows =
writePipeline.apply(Create.of(Arrays.asList(BigQueryTableRowCreate.createTableRow())));
BigQueryWriteToTable.writeToTable(PROJECT, DATASET, table, schema, rows);
writePipeline.run().waitUntilFinish();
// Check that the BigQuery table has the data using the BigQuery Client Library.
String query = String.format("SELECT * FROM `%s.%s.%s`", PROJECT, DATASET, table);
List<String> queryResults =
StreamSupport.stream(
BIGQUERY.query(QueryJobConfiguration.of(query)).iterateAll().spliterator(), false)
.flatMap(values -> fieldValueListToStrings(values).stream())
.collect(Collectors.toList());
assertEquals(expected, queryResults);
// ===--- Test 2: readFromTable ---=== \\
readAndCheck(BigQueryReadFromTable.readFromTable(PROJECT, DATASET, table, readTablePipeline));
readTablePipeline.run().waitUntilFinish();
// ===--- Test 3: readFromQuery ---=== \\
readAndCheck(BigQueryReadFromQuery.readFromQuery(PROJECT, DATASET, table, readQueryPipeline));
readQueryPipeline.run().waitUntilFinish();
// ===--- Test 4: readFromTableWithBigQueryStorageAPI ---=== \\
readAndCheck(
BigQueryReadFromTableWithBigQueryStorageAPI.readFromTableWithBigQueryStorageAPI(
PROJECT, DATASET, table, readBQStorageAPIPipeline));
readBQStorageAPIPipeline.run().waitUntilFinish();
}
// -- Helper methods -- \\
private static void readAndCheck(PCollection<MyData> rows) {
PCollection<String> contents =
rows.apply(
FlatMapElements.into(TypeDescriptors.strings())
.via(BigQuerySamplesIT::myDataToStrings));
PAssert.that(contents).containsInAnyOrder(expected);
}
private static List<String> expected =
Arrays.asList(
"string: UTF-8 strings are supported! ������",
"int64: 432",
"float64: 3.14159265",
"numeric: 1234.56",
"bool: true",
"bytes: VVRGLTggYnl0ZSBzdHJpbmcg8J+MsfCfjLPwn4yN",
"date: 2020-03-19",
"datetime: 2020-03-19T20:41:25.123",
"time: 20:41:25.123",
"timestamp: 2020-03-20T03:41:42.123Z",
"geography: POINT(30 10)",
"array: [1, 2, 3, 4]",
"struct: {string: Text ������, int64: 42}");
private static List<String> myDataToStrings(MyData data) {
return Arrays.asList(
String.format("string: %s", data.myString),
String.format("int64: %d", data.myInt64),
String.format("float64: %.8f", data.myFloat64),
String.format("numeric: %.2f", data.myNumeric.doubleValue()),
String.format("bool: %s", data.myBoolean),
String.format("bytes: %s", Base64.getEncoder().encodeToString(data.myBytes)),
String.format("date: %s", data.myDate),
String.format("datetime: %s", data.myDateTime),
String.format("time: %s", data.myTime),
String.format("timestamp: %s", data.myTimestamp),
String.format("geography: %s", data.myGeography),
String.format("array: %s", data.myArray),
String.format(
"struct: {string: %s, int64: %s}",
data.myStruct.stringValue, data.myStruct.int64Value));
}
private static List<String> fieldValueListToStrings(FieldValueList row) {
MyData data = new MyData();
data.myString = row.get("string_field").getStringValue();
data.myInt64 = row.get("int64_field").getLongValue();
data.myFloat64 = row.get("float64_field").getDoubleValue();
data.myNumeric = new BigDecimal(row.get("numeric_field").getDoubleValue());
data.myBoolean = row.get("bool_field").getBooleanValue();
data.myBytes = Base64.getDecoder().decode(row.get("bytes_field").getStringValue());
data.myDate = LocalDate.parse(row.get("date_field").getStringValue()).toString();
data.myDateTime = LocalDateTime.parse(row.get("datetime_field").getStringValue()).toString();
data.myTime = LocalTime.parse(row.get("time_field").getStringValue()).toString();
data.myTimestamp =
Instant.ofEpochMilli(
(long) (Double.parseDouble(row.get("timestamp_field").getStringValue()) * 1000.0))
.toString();
data.myGeography = row.get("geography_field").getStringValue();
data.myArray =
row.get("array_field").getRepeatedValue().stream()
.map(FieldValue::getLongValue)
.collect(Collectors.toList());
FieldValueList structValues = row.get("struct_field").getRecordValue();
data.myStruct = new MyStruct();
data.myStruct.stringValue = structValues.get(0).getStringValue();
data.myStruct.int64Value = structValues.get(1).getLongValue();
return myDataToStrings(data);
}
}
| examples/java/src/test/java/org/apache/beam/examples/snippets/transforms/io/gcp/bigquery/BigQuerySamplesIT.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.examples.snippets.transforms.io.gcp.bigquery;
import static org.junit.Assert.assertEquals;
import com.google.api.services.bigquery.model.TableRow;
import com.google.api.services.bigquery.model.TableSchema;
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQuery.DatasetDeleteOption;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.DatasetId;
import com.google.cloud.bigquery.DatasetInfo;
import com.google.cloud.bigquery.FieldValue;
import com.google.cloud.bigquery.FieldValueList;
import com.google.cloud.bigquery.QueryJobConfiguration;
import java.math.BigDecimal;
import java.security.SecureRandom;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.Arrays;
import java.util.Base64;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import org.apache.beam.examples.snippets.transforms.io.gcp.bigquery.BigQueryMyData.MyData;
import org.apache.beam.examples.snippets.transforms.io.gcp.bigquery.BigQueryMyData.MyStruct;
import org.apache.beam.sdk.extensions.gcp.options.GcpOptions;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.FlatMapElements;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.TypeDescriptors;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Integration tests for BigQuery samples.
*
* <p>To run locally:
*
* <pre>{@code
* ./gradlew integrationTest -p examples/java/ --info \
* --tests org.apache.beam.examples.snippets.transforms.io.gcp.bigquery.BigQuerySamplesIT \
* -DintegrationTestPipelineOptions='["--tempLocation=gs://YOUR-BUCKET/temp"]' \
* -DintegrationTestRunner=direct
* }</pre>
*/
@RunWith(JUnit4.class)
public class BigQuerySamplesIT {
private static final String PROJECT =
TestPipeline.testingPipelineOptions().as(GcpOptions.class).getProject();
private static final BigQuery BIGQUERY = BigQueryOptions.getDefaultInstance().getService();
private static final String DATASET =
"beam_bigquery_samples_" + System.currentTimeMillis() + "_" + new SecureRandom().nextInt(32);
@Rule public final transient TestPipeline writePipeline = TestPipeline.create();
@Rule public final transient TestPipeline readTablePipeline = TestPipeline.create();
@Rule public final transient TestPipeline readQueryPipeline = TestPipeline.create();
@Rule public final transient TestPipeline readBQStorageAPIPipeline = TestPipeline.create();
@Rule public final TestName testName = new TestName();
@BeforeClass
public static void beforeAll() throws Exception {
BIGQUERY.create(DatasetInfo.newBuilder(PROJECT, DATASET).build());
}
@AfterClass
public static void afterAll() {
BIGQUERY.delete(DatasetId.of(PROJECT, DATASET), DatasetDeleteOption.deleteContents());
}
@Test
public void testTableIO() throws Exception {
String table = testName.getMethodName();
// ===--- Test 1: createTableRow + writeToTable ---===\\
// The rest of the tests depend on this since this is the one that writes
// the contents into the BigQuery table, which the other tests then read.
TableSchema schema = BigQuerySchemaCreate.createSchema();
PCollection<TableRow> rows =
writePipeline.apply(Create.of(Arrays.asList(BigQueryTableRowCreate.createTableRow())));
BigQueryWriteToTable.writeToTable(PROJECT, DATASET, table, schema, rows);
writePipeline.run().waitUntilFinish();
// Check that the BigQuery table has the data using the BigQuery Client Library.
String query = String.format("SELECT * FROM `%s.%s.%s`", PROJECT, DATASET, table);
List<String> queryResults =
StreamSupport.stream(
BIGQUERY.query(QueryJobConfiguration.of(query)).iterateAll().spliterator(), false)
.flatMap(values -> fieldValueListToStrings(values).stream())
.collect(Collectors.toList());
assertEquals(expected, queryResults);
// ===--- Test 2: readFromTable ---=== \\
readAndCheck(BigQueryReadFromTable.readFromTable(PROJECT, DATASET, table, readTablePipeline));
readTablePipeline.run().waitUntilFinish();
// ===--- Test 3: readFromQuery ---=== \\
readAndCheck(BigQueryReadFromQuery.readFromQuery(PROJECT, DATASET, table, readQueryPipeline));
readQueryPipeline.run().waitUntilFinish();
// ===--- Test 4: readFromTableWithBigQueryStorageAPI ---=== \\
readAndCheck(
BigQueryReadFromTableWithBigQueryStorageAPI.readFromTableWithBigQueryStorageAPI(
PROJECT, DATASET, table, readBQStorageAPIPipeline));
readBQStorageAPIPipeline.run().waitUntilFinish();
}
// -- Helper methods -- \\
private static void readAndCheck(PCollection<MyData> rows) {
PCollection<String> contents =
rows.apply(
FlatMapElements.into(TypeDescriptors.strings())
.via(BigQuerySamplesIT::myDataToStrings));
PAssert.that(contents).containsInAnyOrder(expected);
}
private static List<String> expected =
Arrays.asList(
"string: UTF-8 strings are supported! ������",
"int64: 432",
"float64: 3.14159265",
"numeric: 1234.56",
"bool: true",
"bytes: VVRGLTggYnl0ZSBzdHJpbmcg8J+MsfCfjLPwn4yN",
"date: 2020-03-19",
"datetime: 2020-03-19T20:41:25.123",
"time: 20:41:25.123",
"timestamp: 2020-03-20T03:41:42.123Z",
"geography: POINT(30 10)",
"array: [1, 2, 3, 4]",
"struct: {string: Text ������, int64: 42}");
private static List<String> myDataToStrings(MyData data) {
return Arrays.asList(
String.format("string: %s", data.myString),
String.format("int64: %d", data.myInt64),
String.format("float64: %.8f", data.myFloat64),
String.format("numeric: %.2f", data.myNumeric.doubleValue()),
String.format("bool: %s", data.myBoolean),
String.format("bytes: %s", Base64.getEncoder().encodeToString(data.myBytes)),
String.format("date: %s", data.myDate),
String.format("datetime: %s", data.myDateTime),
String.format("time: %s", data.myTime),
String.format("timestamp: %s", data.myTimestamp),
String.format("geography: %s", data.myGeography),
String.format("array: %s", data.myArray),
String.format(
"struct: {string: %s, int64: %s}",
data.myStruct.stringValue, data.myStruct.int64Value));
}
private static List<String> fieldValueListToStrings(FieldValueList row) {
MyData data = new MyData();
data.myString = row.get("string_field").getStringValue();
data.myInt64 = row.get("int64_field").getLongValue();
data.myFloat64 = row.get("float64_field").getDoubleValue();
data.myNumeric = new BigDecimal(row.get("numeric_field").getDoubleValue());
data.myBoolean = row.get("bool_field").getBooleanValue();
data.myBytes = Base64.getDecoder().decode(row.get("bytes_field").getStringValue());
data.myDate = LocalDate.parse(row.get("date_field").getStringValue()).toString();
data.myDateTime = LocalDateTime.parse(row.get("datetime_field").getStringValue()).toString();
data.myTime = LocalTime.parse(row.get("time_field").getStringValue()).toString();
data.myTimestamp =
Instant.ofEpochMilli(
(long) (Double.parseDouble(row.get("timestamp_field").getStringValue()) * 1000.0))
.toString();
data.myGeography = row.get("geography_field").getStringValue();
data.myArray =
row.get("array_field").getRepeatedValue().stream()
.map(FieldValue::getLongValue)
.collect(Collectors.toList());
FieldValueList structValues = row.get("struct_field").getRecordValue();
data.myStruct = new MyStruct();
data.myStruct.stringValue = structValues.get(0).getStringValue();
data.myStruct.int64Value = structValues.get(1).getLongValue();
return myDataToStrings(data);
}
}
| [BEAM-11053] Set projectId in BigQuery service.
| examples/java/src/test/java/org/apache/beam/examples/snippets/transforms/io/gcp/bigquery/BigQuerySamplesIT.java | [BEAM-11053] Set projectId in BigQuery service. | <ide><path>xamples/java/src/test/java/org/apache/beam/examples/snippets/transforms/io/gcp/bigquery/BigQuerySamplesIT.java
<ide> public class BigQuerySamplesIT {
<ide> private static final String PROJECT =
<ide> TestPipeline.testingPipelineOptions().as(GcpOptions.class).getProject();
<del> private static final BigQuery BIGQUERY = BigQueryOptions.getDefaultInstance().getService();
<add> private static final BigQuery BIGQUERY =
<add> BigQueryOptions.newBuilder().setProjectId(PROJECT).build().getService();
<ide> private static final String DATASET =
<ide> "beam_bigquery_samples_" + System.currentTimeMillis() + "_" + new SecureRandom().nextInt(32);
<ide> |
|
Java | mit | 7c3354fdced54ec051315167db55120440e084d7 | 0 | AndrewQuijano/SSTREU2017 | SQLRead.java | import java.math.BigInteger;
import java.sql.*;
import java.util.ArrayList;
import java.util.HashMap;
/**
* A Java MySQL SELECT statement example.
* Demonstrates the use of a SQL SELECT statement against a
* MySQL database, called from a Java program.
*
* Created by Alvin Alexander, http://devdaily.com
*/
public class SQLRead
{
private final String myDriver = "org.gjt.mm.mysql.Driver";
private final String URL = "jdbc:mysql://localhost/";
private String username = "root";
private String password = "SSTREU2017";
String DB = "FIU";
private final String myUrl = URL + DB;
String Query;
boolean isSecure;
private final static int VECTORSIZE = 10;
ArrayList<BigInteger []> S1Array = new ArrayList<BigInteger []>();
ArrayList<Integer []> RSSArray = new ArrayList<Integer []>();
HashMap<Integer, Double []> PKandLocationSet = new HashMap<Integer, Double[]>();
private final static String PLAINLUT = "REUPlainLUT";
private final static String SECRETLUT = "REUEncryptedLUT";
public SQLRead(String Q, Boolean isSec)
{
Query = Q;
isSecure=isSec;
}
public ArrayList<Integer []> getRSS() {return RSSArray;}
public ArrayList<BigInteger []> getS1() {return S1Array;}
public HashMap<Integer, Double[]> getLocationData() {return PKandLocationSet;}
public void execute()
{
try
{
Class.forName(myDriver);
Connection conn = DriverManager.getConnection(myUrl, username, password);
// our SQL SELECT query.
// if you only need a few columns, specify them by name instead of using "*"
// create the java statement
Statement st = conn.createStatement();
// execute the query, and get a java result set
ResultSet rs = st.executeQuery(Query);
//int index;
if (isSecure==false)
{
while(rs.next())
{
Integer [] RSS = new Integer [VECTORSIZE];
Double [] Location = new Double [2];
Location[0] = rs.getDouble("Xcoordinate");
Location[1] = rs.getDouble("Ycoordinate");
PKandLocationSet.put(rs.getInt("ID"), Location);
RSS[0] = rs.getInt("ONE");
RSS[1] = rs.getInt("TWO");
RSS[2] = rs.getInt("THREE");
RSS[3] = rs.getInt("FOUR");
RSS[4] = rs.getInt("FIVE");
RSS[5] = rs.getInt("SIX");
RSS[6] = rs.getInt("SEVEN");
RSS[7] = rs.getInt("EIGHT");
RSS[8] = rs.getInt("NINE");
RSS[9] = rs.getInt("TEN");
RSSArray.add(RSS);
}
}
else
{
while(rs.next())
{
BigInteger [] S1 = new BigInteger [VECTORSIZE];
Double [] Location = new Double [2];
Location[0] = rs.getDouble("Xcoordinate");
Location[1] = rs.getDouble("Ycoordinate");
PKandLocationSet.put(rs.getInt("ID"), Location);
S1[0] = new BigInteger(rs.getString("ONE"));
S1[1] = new BigInteger(rs.getString("TWO"));
S1[2] = new BigInteger(rs.getString("THREE"));
S1[3] = new BigInteger(rs.getString("FOUR"));
S1[4] = new BigInteger(rs.getString("FIVE"));
S1[5] = new BigInteger(rs.getString("SIX"));
S1[6] = new BigInteger(rs.getString("SEVEN"));
S1[7] = new BigInteger(rs.getString("EIGHT"));
S1[8] = new BigInteger(rs.getString("NINE"));
S1[9] = new BigInteger(rs.getString("TEN"));
S1Array.add(S1);
}
st.close();
//I also need plain text to compute S2
Statement partTwo = conn.createStatement();
ResultSet S2 = partTwo.executeQuery("select * from " + PLAINLUT + ";");
while(S2.next())
{
Integer [] RSS = new Integer [VECTORSIZE];
RSS[0] = rs.getInt("ONE");
RSS[1] = rs.getInt("TWO");
RSS[2] = rs.getInt("THREE");
RSS[3] = rs.getInt("FOUR");
RSS[4] = rs.getInt("FIVE");
RSS[5] = rs.getInt("SIX");
RSS[6] = rs.getInt("SEVEN");
RSS[7] = rs.getInt("EIGHT");
RSS[8] = rs.getInt("NINE");
RSS[9] = rs.getInt("TEN");
RSSArray.add(RSS);
}
S2.close();
}
}
catch(SQLException se)
{
System.err.println("SQL EXCEPTION SPOTTED!!!");
se.printStackTrace();
}
catch (ClassNotFoundException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
}
} | Delete SQLRead.java | SQLRead.java | Delete SQLRead.java | <ide><path>QLRead.java
<del>import java.math.BigInteger;
<del>import java.sql.*;
<del>import java.util.ArrayList;
<del>import java.util.HashMap;
<del>
<del>/**
<del> * A Java MySQL SELECT statement example.
<del> * Demonstrates the use of a SQL SELECT statement against a
<del> * MySQL database, called from a Java program.
<del> *
<del> * Created by Alvin Alexander, http://devdaily.com
<del> */
<del>public class SQLRead
<del>{
<del> private final String myDriver = "org.gjt.mm.mysql.Driver";
<del> private final String URL = "jdbc:mysql://localhost/";
<del> private String username = "root";
<del> private String password = "SSTREU2017";
<del> String DB = "FIU";
<del> private final String myUrl = URL + DB;
<del> String Query;
<del> boolean isSecure;
<del> private final static int VECTORSIZE = 10;
<del> ArrayList<BigInteger []> S1Array = new ArrayList<BigInteger []>();
<del> ArrayList<Integer []> RSSArray = new ArrayList<Integer []>();
<del> HashMap<Integer, Double []> PKandLocationSet = new HashMap<Integer, Double[]>();
<del>
<del> private final static String PLAINLUT = "REUPlainLUT";
<del> private final static String SECRETLUT = "REUEncryptedLUT";
<del>
<del> public SQLRead(String Q, Boolean isSec)
<del> {
<del> Query = Q;
<del> isSecure=isSec;
<del> }
<del>
<del> public ArrayList<Integer []> getRSS() {return RSSArray;}
<del> public ArrayList<BigInteger []> getS1() {return S1Array;}
<del> public HashMap<Integer, Double[]> getLocationData() {return PKandLocationSet;}
<del>
<del> public void execute()
<del> {
<del> try
<del> {
<del> Class.forName(myDriver);
<del> Connection conn = DriverManager.getConnection(myUrl, username, password);
<del>
<del> // our SQL SELECT query.
<del> // if you only need a few columns, specify them by name instead of using "*"
<del>
<del> // create the java statement
<del> Statement st = conn.createStatement();
<del> // execute the query, and get a java result set
<del> ResultSet rs = st.executeQuery(Query);
<del>
<del> //int index;
<del>
<del> if (isSecure==false)
<del> {
<del> while(rs.next())
<del> {
<del> Integer [] RSS = new Integer [VECTORSIZE];
<del> Double [] Location = new Double [2];
<del> Location[0] = rs.getDouble("Xcoordinate");
<del> Location[1] = rs.getDouble("Ycoordinate");
<del> PKandLocationSet.put(rs.getInt("ID"), Location);
<del> RSS[0] = rs.getInt("ONE");
<del> RSS[1] = rs.getInt("TWO");
<del> RSS[2] = rs.getInt("THREE");
<del> RSS[3] = rs.getInt("FOUR");
<del> RSS[4] = rs.getInt("FIVE");
<del> RSS[5] = rs.getInt("SIX");
<del> RSS[6] = rs.getInt("SEVEN");
<del> RSS[7] = rs.getInt("EIGHT");
<del> RSS[8] = rs.getInt("NINE");
<del> RSS[9] = rs.getInt("TEN");
<del> RSSArray.add(RSS);
<del> }
<del> }
<del> else
<del> {
<del> while(rs.next())
<del> {
<del> BigInteger [] S1 = new BigInteger [VECTORSIZE];
<del> Double [] Location = new Double [2];
<del> Location[0] = rs.getDouble("Xcoordinate");
<del> Location[1] = rs.getDouble("Ycoordinate");
<del> PKandLocationSet.put(rs.getInt("ID"), Location);
<del> S1[0] = new BigInteger(rs.getString("ONE"));
<del> S1[1] = new BigInteger(rs.getString("TWO"));
<del> S1[2] = new BigInteger(rs.getString("THREE"));
<del> S1[3] = new BigInteger(rs.getString("FOUR"));
<del> S1[4] = new BigInteger(rs.getString("FIVE"));
<del> S1[5] = new BigInteger(rs.getString("SIX"));
<del> S1[6] = new BigInteger(rs.getString("SEVEN"));
<del> S1[7] = new BigInteger(rs.getString("EIGHT"));
<del> S1[8] = new BigInteger(rs.getString("NINE"));
<del> S1[9] = new BigInteger(rs.getString("TEN"));
<del> S1Array.add(S1);
<del> }
<del>
<del> st.close();
<del> //I also need plain text to compute S2
<del> Statement partTwo = conn.createStatement();
<del> ResultSet S2 = partTwo.executeQuery("select * from " + PLAINLUT + ";");
<del> while(S2.next())
<del> {
<del> Integer [] RSS = new Integer [VECTORSIZE];
<del> RSS[0] = rs.getInt("ONE");
<del> RSS[1] = rs.getInt("TWO");
<del> RSS[2] = rs.getInt("THREE");
<del> RSS[3] = rs.getInt("FOUR");
<del> RSS[4] = rs.getInt("FIVE");
<del> RSS[5] = rs.getInt("SIX");
<del> RSS[6] = rs.getInt("SEVEN");
<del> RSS[7] = rs.getInt("EIGHT");
<del> RSS[8] = rs.getInt("NINE");
<del> RSS[9] = rs.getInt("TEN");
<del> RSSArray.add(RSS);
<del> }
<del> S2.close();
<del> }
<del> }
<del> catch(SQLException se)
<del> {
<del> System.err.println("SQL EXCEPTION SPOTTED!!!");
<del> se.printStackTrace();
<del> }
<del> catch (ClassNotFoundException e)
<del> {
<del> // TODO Auto-generated catch block
<del> e.printStackTrace();
<del> }
<del> }
<del>
<del>} |
||
Java | apache-2.0 | b3dba1f864be27403763aa8a172710530f9a7307 | 0 | softindex/datakernel,softindex/datakernel,softindex/datakernel | /*
* Copyright (C) 2015 SoftIndex LLC.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.datakernel.cube.api;
import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import io.datakernel.aggregation_db.AggregationQuery;
import io.datakernel.aggregation_db.AggregationStructure;
import io.datakernel.aggregation_db.keytype.KeyType;
import io.datakernel.async.ResultCallback;
import io.datakernel.codegen.utils.DefiningClassLoader;
import io.datakernel.cube.Cube;
import io.datakernel.eventloop.NioEventloop;
import io.datakernel.http.HttpRequest;
import io.datakernel.http.HttpResponse;
import io.datakernel.http.server.AsyncHttpServlet;
import io.datakernel.stream.StreamConsumers;
import io.datakernel.util.Stopwatch;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.Set;
import static com.google.common.collect.Lists.newArrayList;
import static io.datakernel.cube.api.CommonUtils.*;
public final class DimensionsRequestHandler implements AsyncHttpServlet {
private static final Logger logger = LoggerFactory.getLogger(DimensionsRequestHandler.class);
private final Gson gson;
private final Cube cube;
private final NioEventloop eventloop;
private final DefiningClassLoader classLoader;
public DimensionsRequestHandler(Gson gson, Cube cube, NioEventloop eventloop, DefiningClassLoader classLoader) {
this.gson = gson;
this.cube = cube;
this.eventloop = eventloop;
this.classLoader = classLoader;
}
@SuppressWarnings("unchecked")
@Override
public void serveAsync(final HttpRequest request, final ResultCallback<HttpResponse> callback) {
logger.info("Got request {} for dimensions.", request);
final Stopwatch sw = Stopwatch.createStarted();
String predicatesJson = request.getParameter("filters");
String measuresJson = request.getParameter("measures");
final String dimension = request.getParameter("dimension");
AggregationQuery.QueryPredicates queryPredicates = gson.fromJson(predicatesJson, AggregationQuery.QueryPredicates.class);
List<String> measures = getListOfStrings(gson, measuresJson);
List<String> chain = cube.buildDrillDownChain(queryPredicates.keys(), dimension);
final Set<String> childrenDimensions = cube.findChildrenDimensions(dimension);
List<AggregationQuery.QueryPredicate> filteredPredicates = newArrayList(Iterables.filter(queryPredicates.asCollection(), new Predicate<AggregationQuery.QueryPredicate>() {
@Override
public boolean apply(AggregationQuery.QueryPredicate predicate) {
return !childrenDimensions.contains(predicate.key) && !predicate.key.equals(dimension);
}
}));
List<String> predicateKeys = newArrayList(Iterables.transform(filteredPredicates, new Function<AggregationQuery.QueryPredicate, String>() {
@Override
public String apply(AggregationQuery.QueryPredicate queryPredicate) {
return queryPredicate.key;
}
}));
Set<String> availableMeasures = cube.getAvailableMeasures(newArrayList(Iterables.concat(chain, predicateKeys)), measures);
final AggregationQuery query = new AggregationQuery()
.keys(chain)
.fields(newArrayList(availableMeasures))
.predicates(filteredPredicates);
final Class<?> resultClass = cube.getStructure().createResultClass(query);
final StreamConsumers.ToList consumerStream = queryCube(resultClass, query, cube, eventloop);
consumerStream.setResultCallback(new ResultCallback<List>() {
@Override
public void onResult(List result) {
String jsonResult = constructDimensionsJson(cube, resultClass, result, query, classLoader);
callback.onResult(createResponse(jsonResult));
logger.info("Sent response to /dimensions request {} (query: {}) in {}", request, query, sw);
}
@Override
public void onException(Exception e) {
callback.onResult(response500(e));
logger.error("Sending response to /dimensions query failed. Constructed query: {}", query, e);
}
});
}
public static <T> String constructDimensionsJson(Cube cube, Class<?> resultClass, List<T> results, AggregationQuery query,
DefiningClassLoader classLoader) {
List<String> resultKeys = query.getResultKeys();
JsonArray jsonResults = new JsonArray();
AggregationStructure structure = cube.getStructure();
FieldGetter[] keyGetters = new FieldGetter[resultKeys.size()];
KeyType[] keyTypes = new KeyType[resultKeys.size()];
for (int i = 0; i < resultKeys.size(); i++) {
String key = resultKeys.get(i);
keyGetters[i] = generateGetter(classLoader, resultClass, key);
keyTypes[i] = structure.getKeyType(key);
}
for (T result : results) {
JsonObject resultJsonObject = new JsonObject();
for (int i = 0; i < resultKeys.size(); i++) {
resultJsonObject.add(resultKeys.get(i), keyTypes[i].toJson(keyGetters[i].get(result)));
}
jsonResults.add(resultJsonObject);
}
return jsonResults.toString();
}
}
| cube/src/main/java/io/datakernel/cube/api/DimensionsRequestHandler.java | /*
* Copyright (C) 2015 SoftIndex LLC.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.datakernel.cube.api;
import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import io.datakernel.aggregation_db.AggregationQuery;
import io.datakernel.aggregation_db.AggregationStructure;
import io.datakernel.aggregation_db.keytype.KeyType;
import io.datakernel.async.ResultCallback;
import io.datakernel.codegen.utils.DefiningClassLoader;
import io.datakernel.cube.Cube;
import io.datakernel.eventloop.NioEventloop;
import io.datakernel.http.HttpRequest;
import io.datakernel.http.HttpResponse;
import io.datakernel.http.server.AsyncHttpServlet;
import io.datakernel.stream.StreamConsumers;
import io.datakernel.util.Stopwatch;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.Set;
import static com.google.common.collect.Lists.newArrayList;
import static io.datakernel.cube.api.CommonUtils.*;
public final class DimensionsRequestHandler implements AsyncHttpServlet {
private static final Logger logger = LoggerFactory.getLogger(DimensionsRequestHandler.class);
private final Gson gson;
private final Cube cube;
private final NioEventloop eventloop;
private final DefiningClassLoader classLoader;
public DimensionsRequestHandler(Gson gson, Cube cube, NioEventloop eventloop, DefiningClassLoader classLoader) {
this.gson = gson;
this.cube = cube;
this.eventloop = eventloop;
this.classLoader = classLoader;
}
@SuppressWarnings("unchecked")
@Override
public void serveAsync(final HttpRequest request, final ResultCallback<HttpResponse> callback) {
logger.info("Got request {} for dimensions.", request);
final Stopwatch sw = Stopwatch.createStarted();
String predicatesJson = request.getParameter("filters");
String measuresJson = request.getParameter("measures");
final String dimension = request.getParameter("dimension");
AggregationQuery.QueryPredicates queryPredicates = gson.fromJson(predicatesJson, AggregationQuery.QueryPredicates.class);
List<String> measures = getListOfStrings(gson, measuresJson);
List<String> chain = cube.buildDrillDownChain(queryPredicates.keys(), dimension);
final Set<String> childrenDimensions = cube.findChildrenDimensions(dimension);
List<AggregationQuery.QueryPredicate> filteredPredicates = newArrayList(Iterables.filter(queryPredicates.asCollection(), new Predicate<AggregationQuery.QueryPredicate>() {
@Override
public boolean apply(AggregationQuery.QueryPredicate predicate) {
return !childrenDimensions.contains(predicate.key) && !predicate.key.equals(dimension);
}
}));
Set<String> availableMeasures = cube.getAvailableMeasures(chain, measures);
final AggregationQuery query = new AggregationQuery()
.keys(chain)
.fields(newArrayList(availableMeasures))
.predicates(filteredPredicates);
final Class<?> resultClass = cube.getStructure().createResultClass(query);
final StreamConsumers.ToList consumerStream = queryCube(resultClass, query, cube, eventloop);
consumerStream.setResultCallback(new ResultCallback<List>() {
@Override
public void onResult(List result) {
String jsonResult = constructDimensionsJson(cube, resultClass, result, query, classLoader);
callback.onResult(createResponse(jsonResult));
logger.info("Sent response to /dimensions request {} (query: {}) in {}", request, query, sw);
}
@Override
public void onException(Exception e) {
callback.onResult(response500(e));
logger.error("Sending response to /dimensions query failed. Constructed query: {}", query, e);
}
});
}
public static <T> String constructDimensionsJson(Cube cube, Class<?> resultClass, List<T> results, AggregationQuery query,
DefiningClassLoader classLoader) {
List<String> resultKeys = query.getResultKeys();
JsonArray jsonResults = new JsonArray();
AggregationStructure structure = cube.getStructure();
FieldGetter[] keyGetters = new FieldGetter[resultKeys.size()];
KeyType[] keyTypes = new KeyType[resultKeys.size()];
for (int i = 0; i < resultKeys.size(); i++) {
String key = resultKeys.get(i);
keyGetters[i] = generateGetter(classLoader, resultClass, key);
keyTypes[i] = structure.getKeyType(key);
}
for (T result : results) {
JsonObject resultJsonObject = new JsonObject();
for (int i = 0; i < resultKeys.size(); i++) {
resultJsonObject.add(resultKeys.get(i), keyTypes[i].toJson(keyGetters[i].get(result)));
}
jsonResults.add(resultJsonObject);
}
return jsonResults.toString();
}
}
| Fix DimensionsRequestHandler
| cube/src/main/java/io/datakernel/cube/api/DimensionsRequestHandler.java | Fix DimensionsRequestHandler | <ide><path>ube/src/main/java/io/datakernel/cube/api/DimensionsRequestHandler.java
<ide>
<ide> package io.datakernel.cube.api;
<ide>
<add>import com.google.common.base.Function;
<ide> import com.google.common.base.Predicate;
<ide> import com.google.common.collect.Iterables;
<ide> import com.google.gson.Gson;
<ide> return !childrenDimensions.contains(predicate.key) && !predicate.key.equals(dimension);
<ide> }
<ide> }));
<del> Set<String> availableMeasures = cube.getAvailableMeasures(chain, measures);
<add> List<String> predicateKeys = newArrayList(Iterables.transform(filteredPredicates, new Function<AggregationQuery.QueryPredicate, String>() {
<add> @Override
<add> public String apply(AggregationQuery.QueryPredicate queryPredicate) {
<add> return queryPredicate.key;
<add> }
<add> }));
<add> Set<String> availableMeasures = cube.getAvailableMeasures(newArrayList(Iterables.concat(chain, predicateKeys)), measures);
<ide>
<ide> final AggregationQuery query = new AggregationQuery()
<ide> .keys(chain) |
|
JavaScript | agpl-3.0 | 53df74540357a7e4074851681550ecbe1998390d | 0 | StoDevX/AAO-React-Native,StoDevX/AAO-React-Native,StoDevX/AAO-React-Native,StoDevX/AAO-React-Native,StoDevX/AAO-React-Native,StoDevX/AAO-React-Native,StoDevX/AAO-React-Native,StoDevX/AAO-React-Native,StoDevX/AAO-React-Native | // @flow
import {AppRegistry, YellowBox} from 'react-native'
import App from './app'
YellowBox.ignoreWarnings(['Invalid prop `containerTagName`'])
AppRegistry.registerComponent('AllAboutOlaf', () => App)
| source/root.js | // @flow
import {AppRegistry, YellowBox} from 'react-native'
import App from './app'
YellowBox.ignoreWarnings([])
AppRegistry.registerComponent('AllAboutOlaf', () => App)
| Ignore specific proptype warning for react markdown
| source/root.js | Ignore specific proptype warning for react markdown | <ide><path>ource/root.js
<ide> import {AppRegistry, YellowBox} from 'react-native'
<ide> import App from './app'
<ide>
<del>YellowBox.ignoreWarnings([])
<add>YellowBox.ignoreWarnings(['Invalid prop `containerTagName`'])
<ide>
<ide> AppRegistry.registerComponent('AllAboutOlaf', () => App) |
|
Java | mit | 2fa1a9c8f8fb64e6ee6f2f61f7686f9ae4835b41 | 0 | JOML-CI/JOML,JOML-CI/JOML,JOML-CI/JOML | package org.joml.test;
import java.util.Random;
import junit.framework.TestCase;
import org.joml.Matrix4f;
import org.joml.TrapezoidOrthoCrop;
import org.joml.Vector3f;
/**
* Tests for the {@link TrapezoidOrthoCrop} class.
*
* @author Kai Burjack
*/
public class TrapezoidOrthoCropTest extends TestCase {
public static void testRandomNdcWithinBounds() {
Matrix4f camViewProj = new Matrix4f().perspective((float) Math.toRadians(90.0), 1.0f, 0.1f, 10.0f).lookAt(0, 0, 5, 0, 0, 0, 0, 1, 0);
Matrix4f invCamViewProj = new Matrix4f();
camViewProj.invert(invCamViewProj);
Matrix4f lightView = new Matrix4f().lookAt(0, 5, 0, 0, 0, 0, 0, 0, -1);
Matrix4f crop = new TrapezoidOrthoCrop().compute(camViewProj, lightView, 3.0f, new Matrix4f());
Vector3f corner = new Vector3f();
Random rnd = new Random();
for (int i = 0; i < 5000; i++) {
float x = rnd.nextFloat() * 2.0f - 1.0f;
float y = rnd.nextFloat() * 2.0f - 1.0f;
float z = rnd.nextFloat() * 2.0f - 1.0f;
corner.set(x, y, z);
invCamViewProj.transformProject(corner);
lightView.transformProject(corner);
crop.transformProject(corner);
float d = 1E-5f;
boolean withinBounds =
corner.x >= -1.0f - d && corner.x <= +1.0f + d &&
corner.y >= -1.0f - d && corner.y <= +1.0f + d &&
corner.z >= -1.0f - d && corner.z <= +1.0f + d;
assertTrue(withinBounds);
}
}
}
| test/org/joml/test/TrapezoidOrthoCropTest.java | package org.joml.test;
import java.util.Random;
import junit.framework.TestCase;
import org.joml.Matrix4f;
import org.joml.TrapezoidOrthoCrop;
import org.joml.Vector3f;
/**
* Tests for the {@link TrapezoidOrthoCrop} class.
*
* @author Kai Burjack
*/
public class TrapezoidOrthoCropTest extends TestCase {
public static void testRandomNdcWithinBounds() {
Matrix4f camViewProj = new Matrix4f().perspective((float) Math.toRadians(90.0), 1.0f, 0.1f, 10.0f).lookAt(0, 0, 5, 0, 0, 0, 0, 1, 0);
Matrix4f invCamViewProj = new Matrix4f();
camViewProj.invert(invCamViewProj);
Matrix4f lightView = new Matrix4f().lookAt(0, 5, 0, 0, 0, 0, 0, 0, -1);
Matrix4f crop = new TrapezoidOrthoCrop().compute(camViewProj, lightView, 3.0f, new Matrix4f());
Vector3f corner = new Vector3f();
Random rnd = new Random();
float minZ = Float.MAX_VALUE, maxZ = -Float.MAX_VALUE;
for (int i = 0; i < 5000; i++) {
float x = rnd.nextFloat() * 2.0f - 1.0f;
float y = rnd.nextFloat() * 2.0f - 1.0f;
float z = rnd.nextFloat() * 2.0f - 1.0f;
corner.set(x, y, z);
invCamViewProj.transformProject(corner);
lightView.transformProject(corner);
crop.transformProject(corner);
float d = 1E5f;
boolean withinBounds =
corner.x >= -1.0f - d && corner.x <= +1.0f + d &&
corner.y >= -1.0f - d && corner.y <= +1.0f + d &&
corner.z >= -1.0f - d && corner.z <= +1.0f + d;
assertTrue(withinBounds);
minZ = minZ < corner.z ? minZ : corner.z;
maxZ = maxZ > corner.z ? maxZ : corner.z;
}
}
}
| Fix test | test/org/joml/test/TrapezoidOrthoCropTest.java | Fix test | <ide><path>est/org/joml/test/TrapezoidOrthoCropTest.java
<ide> Matrix4f crop = new TrapezoidOrthoCrop().compute(camViewProj, lightView, 3.0f, new Matrix4f());
<ide> Vector3f corner = new Vector3f();
<ide> Random rnd = new Random();
<del> float minZ = Float.MAX_VALUE, maxZ = -Float.MAX_VALUE;
<ide> for (int i = 0; i < 5000; i++) {
<ide> float x = rnd.nextFloat() * 2.0f - 1.0f;
<ide> float y = rnd.nextFloat() * 2.0f - 1.0f;
<ide> invCamViewProj.transformProject(corner);
<ide> lightView.transformProject(corner);
<ide> crop.transformProject(corner);
<del> float d = 1E5f;
<add> float d = 1E-5f;
<ide> boolean withinBounds =
<ide> corner.x >= -1.0f - d && corner.x <= +1.0f + d &&
<ide> corner.y >= -1.0f - d && corner.y <= +1.0f + d &&
<ide> corner.z >= -1.0f - d && corner.z <= +1.0f + d;
<ide> assertTrue(withinBounds);
<del> minZ = minZ < corner.z ? minZ : corner.z;
<del> maxZ = maxZ > corner.z ? maxZ : corner.z;
<ide> }
<ide> }
<ide> |
|
Java | lgpl-2.1 | 8b1ee8634e7fee9dbbfc941a89b70f34707875df | 0 | ggiudetti/opencms-core,alkacon/opencms-core,gallardo/opencms-core,ggiudetti/opencms-core,it-tavis/opencms-core,gallardo/opencms-core,gallardo/opencms-core,MenZil/opencms-core,it-tavis/opencms-core,victos/opencms-core,it-tavis/opencms-core,gallardo/opencms-core,victos/opencms-core,mediaworx/opencms-core,MenZil/opencms-core,victos/opencms-core,alkacon/opencms-core,sbonoc/opencms-core,MenZil/opencms-core,victos/opencms-core,mediaworx/opencms-core,it-tavis/opencms-core,ggiudetti/opencms-core,alkacon/opencms-core,sbonoc/opencms-core,sbonoc/opencms-core,mediaworx/opencms-core,sbonoc/opencms-core,mediaworx/opencms-core,alkacon/opencms-core,MenZil/opencms-core,ggiudetti/opencms-core | /*
* This library is part of OpenCms -
* the Open Source Content Management System
*
* Copyright (c) Alkacon Software GmbH (http://www.alkacon.com)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* For further information about Alkacon Software GmbH, please see the
* company website: http://www.alkacon.com
*
* For further information about OpenCms, please see the
* project website: http://www.opencms.org
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.opencms.test;
import org.opencms.configuration.CmsParameterConfiguration;
import org.opencms.db.CmsDbPool;
import org.opencms.db.CmsResourceState;
import org.opencms.file.CmsFile;
import org.opencms.file.CmsGroup;
import org.opencms.file.CmsObject;
import org.opencms.file.CmsProject;
import org.opencms.file.CmsProperty;
import org.opencms.file.CmsPropertyDefinition;
import org.opencms.file.CmsResource;
import org.opencms.file.CmsResourceFilter;
import org.opencms.file.CmsUser;
import org.opencms.file.history.CmsHistoryFile;
import org.opencms.file.history.I_CmsHistoryResource;
import org.opencms.file.types.CmsResourceTypeBinary;
import org.opencms.file.types.CmsResourceTypeFolder;
import org.opencms.file.types.CmsResourceTypePlain;
import org.opencms.importexport.CmsImportParameters;
import org.opencms.lock.CmsLock;
import org.opencms.lock.CmsLockType;
import org.opencms.main.CmsException;
import org.opencms.main.CmsShell;
import org.opencms.main.CmsSystemInfo;
import org.opencms.main.OpenCms;
import org.opencms.publish.CmsPublishJobBase;
import org.opencms.publish.CmsPublishJobInfoBean;
import org.opencms.relations.CmsRelation;
import org.opencms.report.CmsShellReport;
import org.opencms.report.I_CmsReport;
import org.opencms.security.CmsAccessControlEntry;
import org.opencms.security.CmsAccessControlList;
import org.opencms.security.CmsPermissionSet;
import org.opencms.security.CmsPermissionSetCustom;
import org.opencms.security.I_CmsPrincipal;
import org.opencms.setup.CmsSetupDb;
import org.opencms.util.CmsDateUtil;
import org.opencms.util.CmsFileUtil;
import org.opencms.util.CmsUUID;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
import junit.extensions.TestSetup;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.apache.commons.io.filefilter.FileFilterUtils;
import org.dom4j.Document;
import org.dom4j.Node;
import org.dom4j.util.NodeComparator;
/**
* Extends the JUnit standard with methods to handle an OpenCms database
* test instance.<p>
*
* The required configuration files are located in the
* <code>${test.data.path}/WEB-INF</code> folder structure.<p>
*
* To run this test you might have to change the database connection
* values in the provided <code>${test.data.path}/WEB-INF/config/opencms.properties</code> file.<p>
*
* @since 6.0.0
*/
public class OpenCmsTestCase extends TestCase {
/** Class to bundle the connection information. */
protected static class ConnectionData {
/** The name of the database. */
public String m_dbName;
/** The database driver. */
public String m_jdbcDriver;
/** The database url. */
public String m_jdbcUrl;
/** Additional database parameters. */
public String m_jdbcUrlParams;
/** The name of the user. */
public String m_userName;
/** The password of the user. */
public String m_userPassword;
}
/**
* Extension of <code>NodeComparator</code> to store unequal nodes.<p>
*/
static class InternalNodeComparator extends NodeComparator implements Serializable {
/** UID required for safe serialization. */
private static final long serialVersionUID = 2742216550970181832L;
/** Unequal node1. */
public Node m_node1;
/** Unequal node2. */
public Node m_node2;
/**
* @see org.dom4j.util.NodeComparator#compare(org.dom4j.Node, org.dom4j.Node)
*/
@Override
public int compare(Node n1, Node n2) {
int result = super.compare(n1, n2);
if ((result != 0) && (m_node1 == null)) {
m_node1 = n1;
m_node2 = n2;
}
return result;
}
}
/** test article type id constant. */
public static final int ARTICLE_TYPEID = 27;
/** Special character constant. */
public static final String C_AUML_LOWER = "\u00e4";
/** Special character constant. */
public static final String C_AUML_UPPER = "\u00c4";
/** Special character constant. */
public static final String C_EURO = "\u20ac";
/** Special character constant. */
public static final String C_OUML_LOWER = "\u00f6";
/** Special character constant. */
public static final String C_OUML_UPPER = "\u00d6";
/** Special character constant. */
public static final String C_SHARP_S = "\u00df";
/** Special character constant. */
public static final String C_UUML_LOWER = "\u00fc";
/** Special character constant. */
public static final String C_UUML_UPPER = "\u00dc";
/** Key for tests on MySql database. */
public static final String DB_MYSQL = "mysql";
/** Key for tests on Oracle database. */
public static final String DB_ORACLE = "oracle";
/** The OpenCms/database configuration. */
public static CmsParameterConfiguration m_configuration;
/** Name of the default tablespace (oracle only). */
public static String m_defaultTablespace;
/** Name of the index tablespace (oracle only). */
public static String m_indexTablespace;
/** The internal storages. */
public static HashMap<String, OpenCmsTestResourceStorage> m_resourceStorages;
/** Name of the temporary tablespace (oracle only). */
public static String m_tempTablespace;
/** Additional connection data. */
protected static ConnectionData m_additionalConnection;
/** The user connection data. */
protected static ConnectionData m_defaultConnection;
/** The setup connection data. */
protected static ConnectionData m_setupConnection;
/** The cached list of OpenCms class names. */
private static List<String> classNameList;
/** The file date of the configuration files. */
private static long[] m_dateConfigFiles;
/** DB product used for the tests. */
private static String m_dbProduct = DB_MYSQL;
/** The path to the default setup data files. */
private static String m_setupDataPath;
/** The initialized OpenCms shell instance. */
private static CmsShell m_shell;
/** The list of paths to the additional test data files. */
private static List<String> m_testDataPath;
/** The current resource storage. */
public OpenCmsTestResourceStorage m_currentResourceStrorage;
/**
* Default JUnit constructor.<p>
*
* @param arg0 JUnit parameters
*/
public OpenCmsTestCase(String arg0) {
this(arg0, true);
}
/**
* JUnit constructor.<p>
* @param arg0 JUnit parameters
* @param initialize indicates if the configuration will be initialized
*/
public OpenCmsTestCase(String arg0, boolean initialize) {
super(arg0);
if (initialize) {
OpenCmsTestLogAppender.setBreakOnError(false);
if (m_resourceStorages == null) {
m_resourceStorages = new HashMap<String, OpenCmsTestResourceStorage>();
}
// initialize configuration
initConfiguration();
// set "OpenCmsLog" system property to enable the logger
OpenCmsTestLogAppender.setBreakOnError(true);
}
}
/**
     * Generates a set of binary and text test files in the given folder.<p>
*
* @param cms the cms context
* @param vfsFolder name of the folder
* @param numberOfFiles the number of files to generate
* @param fileTypeDistribution a percentage: x% binary files and (1-x)% text files
*
* @return the number of files generated
*
* @throws Exception if something goes wrong
*/
public static int generateContent(CmsObject cms, String vfsFolder, int numberOfFiles, double fileTypeDistribution)
throws Exception {
int maxProps = 10;
double propertyDistribution = 0.0;
int writtenFiles = 0;
int numberOfBinaryFiles = (int)(numberOfFiles * fileTypeDistribution);
// generate binary files
writtenFiles += generateResources(
cms,
"org/opencms/search/pdf-test-112.pdf",
vfsFolder,
numberOfBinaryFiles,
CmsResourceTypeBinary.getStaticTypeId(),
maxProps,
propertyDistribution);
// generate text files
writtenFiles += generateResources(cms, "org/opencms/search/extractors/test1.html", vfsFolder, numberOfFiles
- numberOfBinaryFiles, CmsResourceTypePlain.getStaticTypeId(), maxProps, propertyDistribution);
System.out.println("" + writtenFiles + " files written in Folder " + vfsFolder);
return writtenFiles;
}
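
    /*
     * Illustrative usage sketch, assuming an initialized CmsObject "cms" (e.g. obtained from
     * setupOpenCms(...)); the folder name "/testGenerated/" is only an assumption, not taken
     * from the test data. It shows how a test could fill a folder with mixed content:
     *
     *     cms.createResource("/testGenerated/", CmsResourceTypeFolder.getStaticTypeId());
     *     cms.unlockResource("/testGenerated/");
     *     // roughly 30% PDF (binary) and 70% plain text files
     *     int written = generateContent(cms, "/testGenerated/", 100, 0.3);
     *     assertEquals(100, written);
     */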
/**
* Generates a sub tree of folders with files.<p>
*
* @param cms the cms context
* @param vfsFolder where to create the subtree
* @param maxWidth an upper bound for the number of subfolder a folder should have
     * @param maxDepth an upper bound for the depth of the generated subtree
     * @param maxProps upper bound for number of properties to create for each resource
     * @param propertyDistribution a percentage: x% shared props and (1-x)% individual props
* @param maxNumberOfFiles upper bound for the number of files in each folder
* @param fileTypeDistribution a percentage: x% binary files and (1-x)% text files
*
* @return the number of really written files
*
* @throws Exception if something goes wrong
*/
public static int generateContent(
CmsObject cms,
String vfsFolder,
int maxWidth,
int maxDepth,
int maxProps,
double propertyDistribution,
int maxNumberOfFiles,
double fileTypeDistribution) throws Exception {
int fileNameLength = 10;
int propValueLength = 10;
// end recursion
if (maxDepth < 1) {
return 0;
}
if (!vfsFolder.endsWith("/")) {
vfsFolder += "/";
}
int writtenFiles = 0;
Random rnd = new Random();
int width = rnd.nextInt(maxWidth) + 1;
int depth = maxDepth - rnd.nextInt(2);
for (int i = 0; i < width; i++) {
// generate folder
String vfsName = vfsFolder + generateName(fileNameLength) + i;
List<CmsProperty> props = generateProperties(cms, maxProps, propValueLength, propertyDistribution);
cms.createResource(vfsName, CmsResourceTypeFolder.getStaticTypeId(), new byte[0], props);
cms.unlockResource(vfsName);
int numberOfFiles = rnd.nextInt(maxNumberOfFiles) + 1;
// generate binary files
int numberOfBinaryFiles = (int)(numberOfFiles * fileTypeDistribution);
writtenFiles += generateResources(
cms,
"org/opencms/search/pdf-test-112.pdf",
vfsName,
numberOfBinaryFiles,
CmsResourceTypeBinary.getStaticTypeId(),
maxProps,
propertyDistribution);
// generate text files
writtenFiles += generateResources(cms, "org/opencms/search/extractors/test1.html", vfsName, numberOfFiles
- numberOfBinaryFiles, CmsResourceTypePlain.getStaticTypeId(), maxProps, propertyDistribution);
// in depth recursion
writtenFiles += generateContent(
cms,
vfsName,
maxWidth,
depth - 1,
maxProps,
propertyDistribution,
maxNumberOfFiles,
fileTypeDistribution);
System.out.println("" + writtenFiles + " files written in Folder " + vfsName);
}
return writtenFiles;
}
/**
* Generate a new random name.<p>
*
* @param maxLen upper bound for the length of the name
*
* @return a random name
*/
public static String generateName(int maxLen) {
String name = "";
Random rnd = new Random();
int len = rnd.nextInt(maxLen) + 1;
for (int j = 0; j < len; j++) {
name += (char)(rnd.nextInt(26) + 97);
}
return name;
}
/**
* Generates random properties.<p>
*
* @param cms the cms context
* @param maxProps upper bound for number of properties to create for each resource
* @param propValueLength upper bound for the number of char for the values
     * @param propertyDistribution a percentage: x% shared props and (1-x)% individual props
*
* @return a list of <code>{@link CmsProperty}</code> objects
*
* @throws CmsException if something goes wrong
*/
public static List<CmsProperty> generateProperties(
CmsObject cms,
int maxProps,
int propValueLength,
double propertyDistribution) throws CmsException {
List<CmsPropertyDefinition> propList = cms.readAllPropertyDefinitions();
List<CmsProperty> props = new ArrayList<CmsProperty>();
if (maxProps > propList.size()) {
maxProps = propList.size();
}
Random rnd = new Random();
int propN = rnd.nextInt(maxProps) + 1;
for (int j = 0; j < propN; j++) {
CmsPropertyDefinition propDef = propList.get((int)(Math.random() * propList.size()));
propList.remove(propDef);
if (Math.random() < propertyDistribution) {
// only resource prop
props.add(new CmsProperty(propDef.getName(), null, generateName(propValueLength)));
} else {
// resource and structure props
props.add(new CmsProperty(
propDef.getName(),
generateName(propValueLength),
generateName(propValueLength)));
}
}
return props;
}
/**
* Generates n new resources in a given folder.<p>
*
* @param cms the cms context
* @param rfsName the rfs file for the content
* @param vfsFolder the folder to create the resources in
* @param n number of resources to generate
* @param type the type of the resource
* @param maxProps upper bound for number of properties to create for each resource
     * @param propertyDistribution a percentage: x% shared props and (1-x)% individual props
*
* @return the number of really written files
*
* @throws Exception if something goes wrong
*/
public static int generateResources(
CmsObject cms,
String rfsName,
String vfsFolder,
int n,
int type,
int maxProps,
double propertyDistribution) throws Exception {
int fileNameLength = 10;
int propValueLength = 10;
if (!vfsFolder.endsWith("/")) {
vfsFolder += "/";
}
int writtenFiles = 0;
System.out.println("Importing Files");
for (int i = 0; i < n; i++) {
String vfsName = vfsFolder + generateName(fileNameLength) + i;
if (rfsName.lastIndexOf('.') > 0) {
vfsName += rfsName.substring(rfsName.lastIndexOf('.'));
}
List<CmsProperty> props = generateProperties(cms, maxProps, propValueLength, propertyDistribution);
try {
OpenCmsTestCase.importTestResource(cms, rfsName, vfsName, type, props);
writtenFiles++;
} catch (Exception e) {
System.out.println("error! " + e.getMessage());
}
}
return writtenFiles;
}
/**
* Generates a wrapper for a test class which handles setting up the OpenCms instance.<p>
*
* @param testClass the test class to wrap
* @param importFolder the RFS folder with the test data to import
* @param targetFolder the VFS target folder for the test data
*
* @return the wrapped test
*/
public static Test generateSetupTestWrapper(
Class<? extends Test> testClass,
final String importFolder,
final String targetFolder) {
try {
TestSuite suite = new TestSuite();
suite.setName(testClass.getName());
Constructor<? extends Test> constructor = testClass.getConstructor(String.class);
for (Method method : testClass.getMethods()) {
String methodName = method.getName();
if (methodName.startsWith("test") && (method.getParameterTypes().length == 0)) {
Test test = constructor.newInstance(method.getName());
suite.addTest(test);
}
}
TestSetup wrapper = new TestSetup(suite) {
/**
* @see junit.extensions.TestSetup#setUp()
*/
@Override
protected void setUp() {
setupOpenCms(importFolder, targetFolder);
}
/**
* @see junit.extensions.TestSetup#tearDown()
*/
@Override
protected void tearDown() {
removeOpenCms();
}
};
return wrapper;
} catch (Throwable e) {
throw new RuntimeException(e);
}
}
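
    /*
     * Illustrative usage sketch: a concrete test class can expose this wrapper through a
     * standard JUnit 3 suite() method, so OpenCms is set up once before all test methods and
     * removed afterwards. "MyVfsTest" and the import/target folders below are placeholders:
     *
     *     public static Test suite() {
     *         return generateSetupTestWrapper(MyVfsTest.class, "simpletest", "/");
     *     }
     */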
/**
* Generates n new users for a given group.<p>
*
* @param cms the cms context
     * @param groupName the group name; the group will be created if it does not exist
* @param n number of users to generate
*
* @throws CmsException if something goes wrong
*/
public static void generateUsers(CmsObject cms, String groupName, int n) throws CmsException {
CmsGroup group = null;
try {
group = cms.readGroup(groupName);
} catch (Exception e) {
// ignore
}
if (group == null) {
cms.createGroup(groupName, groupName, 0, null);
}
for (int i = 0; i < n; i++) {
String name = generateName(10) + i;
cms.createUser(name, "pwd" + i, "test user " + i, null);
cms.addUserToGroup(name, groupName);
}
}
/**
* Gets the list of all names of classes which exist as class files in a directory in the classpath (not in JARs) and whose path contains 'opencms' or 'alkacon'.
*
* @return the list of all opencms class on the class path
*
* @throws Exception if something goes wrong
*/
public static List<String> getClassNames() throws Exception {
if (classNameList != null) {
return classNameList;
}
FileFilter filter = new FileFilter() {
public boolean accept(File pathname) {
return pathname.isFile() && pathname.getName().endsWith(".class");
}
};
String[] classpaths = System.getProperty("java.class.path", "").split(File.pathSeparator);
List<String> classNames = new ArrayList<String>();
for (String path : classpaths) {
File baseFile = new File(path);
String basePath = baseFile.getPath();
List<File> classFiles = CmsFileUtil.getFiles(path, filter, true);
for (File classFile : classFiles) {
String relativePath = classFile.getPath().substring(basePath.length());
String className = relativePath.replace("" + File.separatorChar, ".").substring(1).replaceFirst(
"\\.class$",
"");
if ((className.indexOf("opencms") > -1) || (className.indexOf("alkacon") > -1)) {
classNames.add(className);
}
}
}
classNameList = classNames;
return classNames;
}
/**
* Returns the currently used database/configuration.<p>
*
* @return he currently used database/configuration
*/
public static String getDbProduct() {
return m_dbProduct;
}
/**
* Does a database import from the given RFS folder to the given VFS folder.<p>
*
* @param importFolder the RFS folder to import from
* @param targetFolder the VFS folder to import into
*/
public static void importData(String importFolder, String targetFolder) {
// turn off exceptions after error logging during setup (won't work otherwise)
OpenCmsTestLogAppender.setBreakOnError(false);
// output a message
System.out.println("\n\n\n----- Starting test case: Importing OpenCms VFS data -----");
// kill any old shell that might have remained from a previous test
if (m_shell != null) {
try {
m_shell.exit();
m_shell = null;
} catch (Throwable t) {
// ignore
}
}
// create a shell instance
m_shell = new CmsShell(getTestDataPath("WEB-INF" + File.separator), null, null, "${user}@${project}>", null);
// open the test script
File script;
FileInputStream stream = null;
CmsObject cms = null;
try {
// start the shell with the base script
script = new File(getTestDataPath("scripts/script_import.txt"));
stream = new FileInputStream(script);
m_shell.start(stream);
// log in the Admin user and switch to the setup project
cms = OpenCms.initCmsObject(OpenCms.getDefaultUsers().getUserGuest());
cms.loginUser("Admin", "admin");
cms.getRequestContext().setCurrentProject(cms.readProject("tempFileProject"));
if (importFolder != null) {
// import the "simpletest" files
importResources(cms, importFolder, targetFolder);
}
// publish the current project by script
script = new File(getTestDataPath("scripts/script_import_publish.txt"));
stream = new FileInputStream(script);
m_shell.start(stream);
OpenCms.getPublishManager().waitWhileRunning();
// switch to the "Offline" project
cms.getRequestContext().setCurrentProject(cms.readProject("Offline"));
cms.getRequestContext().setSiteRoot("/sites/default/");
// output a message
System.out.println("----- Starting test cases -----");
} catch (Throwable t) {
t.printStackTrace(System.err);
fail("Unable to setup OpenCms\n" + CmsException.getStackTraceAsString(t));
}
// turn on exceptions after error logging
OpenCmsTestLogAppender.setBreakOnError(true);
}
/**
* Initializes the path to the test data configuration files
* using the default path.<p>
*/
public static synchronized void initTestDataPath() {
if (m_testDataPath == null) {
m_testDataPath = new ArrayList<String>(4);
            // test whether we are instantiated within the
// AllTest suite and therefore the OpenCmsTestProperties are
// already set up:
try {
OpenCmsTestProperties.getInstance();
} catch (RuntimeException rte) {
OpenCmsTestProperties.initialize(org.opencms.test.AllTests.TEST_PROPERTIES_PATH);
}
// set data path
addTestDataPath(OpenCmsTestProperties.getInstance().getTestDataPath());
}
}
/**
* Removes the initialized OpenCms database and all
* temporary files created during the test run.<p>
*/
public static void removeOpenCms() {
// ensure logging does not throw exceptions
OpenCmsTestLogAppender.setBreakOnError(false);
// output a message
m_shell.printPrompt();
System.out.println("----- Test cases finished -----");
// exit the shell
m_shell.exit();
try {
// sleep 0.5 seconds - sometimes other Threads need to finish before the next test case can start
Thread.sleep(500);
} catch (InterruptedException e) {
// ignore
}
// remove the database
removeDatabase();
String path;
// copy the configuration files to re-create the original configuration
String configFolder = getTestDataPath("WEB-INF" + File.separator + "config." + m_dbProduct + File.separator);
copyConfiguration(configFolder);
// remove potentially created "classes, "lib", "backup" etc. folder
path = getTestDataPath("WEB-INF/classes/");
if (path != null) {
CmsFileUtil.purgeDirectory(new File(path));
}
path = getTestDataPath("WEB-INF/logs/publish");
if (path != null) {
CmsFileUtil.purgeDirectory(new File(path));
}
path = getTestDataPath("WEB-INF/lib/");
if (path != null) {
CmsFileUtil.purgeDirectory(new File(path));
}
path = getTestDataPath("WEB-INF/" + CmsSystemInfo.FOLDER_CONFIG_DEFAULT + "backup/");
if (path != null) {
CmsFileUtil.purgeDirectory(new File(path));
}
path = getTestDataPath("WEB-INF/index/");
if ((path != null) && !m_configuration.containsKey("test.keep.searchIndex")) {
CmsFileUtil.purgeDirectory(new File(path));
}
path = getTestDataPath("export/");
if (path != null) {
CmsFileUtil.purgeDirectory(new File(path));
}
}
/**
* Restarts the OpenCms shell.<p>
*/
public static void restartOpenCms() {
// turn off exceptions after error logging during setup (won't work otherwise)
OpenCmsTestLogAppender.setBreakOnError(false);
// output a message
System.out.println("\n\n\n----- Restarting OpenCms -----");
// kill any old shell that might have remained from a previous test
if (m_shell != null) {
try {
m_shell.exit();
m_shell = null;
} catch (Throwable t) {
// ignore
}
}
// create a shell instance
m_shell = new CmsShell(getTestDataPath("WEB-INF" + File.separator), null, null, "${user}@${project}>", null);
// turn on exceptions after error logging
OpenCmsTestLogAppender.setBreakOnError(true);
}
/**
* Sets up a complete OpenCms instance with configuration from the config-ori folder,
* creating the usual projects, and importing a default database.<p>
*
* @param importFolder the folder to import in the "real" FS
* @param targetFolder the target folder of the import in the VFS
* @return an initialized OpenCms context with "Admin" user in the "Offline" project with the site root set to "/"
*/
public static CmsObject setupOpenCms(String importFolder, String targetFolder) {
return setupOpenCms(importFolder, targetFolder, getTestDataPath("WEB-INF/config." + m_dbProduct + "/"), true);
}
/**
* Sets up a complete OpenCms instance with configuration from the config-ori folder,
* creating the usual projects, and importing a default database.<p>
*
* @param importFolder the folder to import in the "real" FS
* @param targetFolder the target folder of the import in the VFS
     * @param publish flag to signal whether the publish script should be called
* @return an initialized OpenCms context with "Admin" user in the "Offline" project with the site root set to "/"
*/
public static CmsObject setupOpenCms(String importFolder, String targetFolder, boolean publish) {
return setupOpenCms(importFolder, targetFolder, getTestDataPath("WEB-INF/config." + m_dbProduct + "/"), publish);
}
/**
* Sets up a complete OpenCms instance with configuration from the config-ori folder,
* creating the usual projects, and importing a default database.<p>
*
* @param importFolder the folder to import in the "real" FS
* @param targetFolder the target folder of the import in the VFS
* @param specialConfigFolder the folder that contains the special configuration files for this setup
* @return an initialized OpenCms context with "Admin" user in the "Offline" project with the site root set to "/"
*/
public static CmsObject setupOpenCms(String importFolder, String targetFolder, String specialConfigFolder) {
return setupOpenCms(
importFolder,
targetFolder,
getTestDataPath("WEB-INF/config." + m_dbProduct + "/"),
getTestDataPath(specialConfigFolder),
true);
}
/**
* Sets up a complete OpenCms instance, creating the usual projects,
* and importing a default database.<p>
*
* @param importFolder the folder to import in the "real" FS
* @param targetFolder the target folder of the import in the VFS
* @param configFolder the folder to copy the configuration files
* @param publish publish only if set
*
* @return an initialized OpenCms context with "Admin" user in the "Offline" project with the site root set to "/"
*/
public static CmsObject setupOpenCms(String importFolder, String targetFolder, String configFolder, boolean publish) {
return setupOpenCms(importFolder, targetFolder, configFolder, null, publish);
}
/**
* Sets up a complete OpenCms instance, creating the usual projects,
* and importing a default database.<p>
*
* @param importFolder the folder to import in the "real" FS
* @param targetFolder the target folder of the import in the VFS
* @param configFolder the folder to copy the standard configuration files from
     * @param specialConfigFolder the folder that contains the special configuration files for this setup
* @param publish publish only if set
*
* @return an initialized OpenCms context with "Admin" user in the "Offline" project with the site root set to "/"
*/
public static CmsObject setupOpenCms(
String importFolder,
String targetFolder,
String configFolder,
String specialConfigFolder,
boolean publish) {
        // initialize a new resource storage
m_resourceStorages = new HashMap<String, OpenCmsTestResourceStorage>();
// turn off exceptions after error logging during setup (won't work otherwise)
OpenCmsTestLogAppender.setBreakOnError(false);
// output a message
System.out.println("\n\n\n----- Starting test case: Importing OpenCms VFS data -----");
// kill any old shell that might have remained from a previous test
if (m_shell != null) {
try {
m_shell.exit();
m_shell = null;
} catch (Throwable t) {
// ignore
}
}
// create the OpenCms "config" folder
File configFile = new File(m_testDataPath.get(0)
+ "WEB-INF"
+ File.separator
+ CmsSystemInfo.FOLDER_CONFIG_DEFAULT);
if (!configFile.exists()) {
configFile.mkdir();
}
// copy the configuration files from the base folder
copyConfiguration(getTestDataPath("WEB-INF/base/"));
// copy the special configuration files from the database folder
copyConfiguration(configFolder);
// copy the configuration files from the special individual folder if required
if (specialConfigFolder != null) {
copyConfiguration(specialConfigFolder);
}
// create a new database first
setupDatabase();
// create a shell instance
m_shell = new CmsShell(getTestDataPath("WEB-INF" + File.separator), null, null, "${user}@${project}>", null);
// open the test script
File script;
FileInputStream stream = null;
CmsObject cms = null;
try {
// start the shell with the base script
script = new File(getTestDataPath("scripts/script_base.txt"));
stream = new FileInputStream(script);
m_shell.start(stream);
// add the default folders by script
script = new File(getTestDataPath("scripts/script_default_folders.txt"));
stream = new FileInputStream(script);
m_shell.start(stream);
// log in the Admin user and switch to the setup project
cms = OpenCms.initCmsObject(OpenCms.getDefaultUsers().getUserGuest());
cms.loginUser("Admin", "admin");
cms.getRequestContext().setCurrentProject(cms.readProject("_setupProject"));
if (importFolder != null) {
// import the "simpletest" files
importResources(cms, importFolder, targetFolder);
}
// create the default projects by script
script = new File(getTestDataPath("scripts/script_default_projects.txt"));
stream = new FileInputStream(script);
m_shell.start(stream);
if (publish) {
// publish the current project by script
script = new File(getTestDataPath("scripts/script_publish.txt"));
stream = new FileInputStream(script);
m_shell.start(stream);
OpenCms.getPublishManager().waitWhileRunning();
} else {
cms.unlockProject(cms.readProject("_setupProject").getUuid());
}
// switch to the "Offline" project
cms.getRequestContext().setCurrentProject(cms.readProject("Offline"));
cms.getRequestContext().setSiteRoot("/sites/default/");
// output a message
System.out.println("----- Starting test cases -----");
} catch (Throwable t) {
t.printStackTrace(System.err);
fail("Unable to setup OpenCms\n" + CmsException.getStackTraceAsString(t));
}
// turn on exceptions after error logging
OpenCmsTestLogAppender.setBreakOnError(true);
// return the initialized cms context Object
return cms;
}
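
    /*
     * Illustrative usage sketch (resource names are placeholders): tests that do not use
     * generateSetupTestWrapper() typically bracket their work manually and must always call
     * removeOpenCms() when finished, e.g.:
     *
     *     CmsObject cms = setupOpenCms("simpletest", "/");
     *     try {
     *         cms.lockResource("/index.html");
     *         cms.unlockResource("/index.html");
     *     } finally {
     *         removeOpenCms();
     *     }
     */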
/**
* Adds an additional path to the list of test data configuration files.<p>
*
* @param dataPath the path to add
*/
protected static synchronized void addTestDataPath(String dataPath) {
// check if the db data folder is available
File testDataFolder = new File(dataPath);
if (!testDataFolder.exists()) {
fail("DB setup data not available at " + testDataFolder.getAbsolutePath());
}
String path = CmsFileUtil.normalizePath(testDataFolder.getAbsolutePath() + File.separator);
if (!m_testDataPath.contains(path)) {
m_testDataPath.add(path);
}
}
/**
* Check the setup DB for errors that might have occurred.<p>
*
* @param setupDb the setup DB object to check
*/
protected static void checkErrors(CmsSetupDb setupDb) {
if (!setupDb.noErrors()) {
List<String> errors = setupDb.getErrors();
for (Iterator<String> i = errors.iterator(); i.hasNext();) {
String error = i.next();
System.out.println(error);
}
fail(setupDb.getErrors().get(0));
}
}
/**
* Returns an initialized replacer map.<p>
*
* @param connectionData the connection data to derive the replacer information
*
* @return an initialized replacer map
*/
protected static Map<String, String> getReplacer(ConnectionData connectionData) {
Map<String, String> replacer = new HashMap<String, String>();
replacer.put("${database}", connectionData.m_dbName);
replacer.put("${user}", connectionData.m_userName);
replacer.put("${password}", connectionData.m_userPassword);
replacer.put("${defaultTablespace}", m_defaultTablespace);
replacer.put("${indexTablespace}", m_indexTablespace);
replacer.put("${temporaryTablespace}", m_tempTablespace);
return replacer;
}
/**
* Returns the path to the data files used by the setup wizard.<p>
*
* Whenever possible use this path to ensure that the files
* used for testing are actually the same as for the setup.<p>
*
* @return the path to the data files used by the setup wizard
*/
protected static synchronized String getSetupDataPath() {
if (m_setupDataPath == null) {
// check if the db setup files are available
File setupDataFolder = new File(OpenCmsTestProperties.getInstance().getTestWebappPath());
if (!setupDataFolder.exists()) {
fail("DB setup data not available at " + setupDataFolder.getAbsolutePath());
}
m_setupDataPath = setupDataFolder.getAbsolutePath() + File.separator;
}
// return the path name
return m_setupDataPath;
}
/**
* Returns an initialized DB setup object.<p>
*
* @param connection the connection data
*
* @return the initialized setup DB object
*/
protected static CmsSetupDb getSetupDb(ConnectionData connection) {
// create setup DB instance
CmsSetupDb setupDb = new CmsSetupDb(getSetupDataPath());
// connect to the DB
setupDb.setConnection(
connection.m_jdbcDriver,
connection.m_jdbcUrl,
connection.m_jdbcUrlParams,
connection.m_userName,
connection.m_userPassword);
// check for errors
if (!DB_ORACLE.equals(m_dbProduct)) {
checkErrors(setupDb);
}
return setupDb;
}
/**
* Returns the path to a file in the test data configuration,
* or <code>null</code> if the given file can not be found.<p>
*
     * This method searches the given file in all configured test data paths.
* It returns the file found first.<p>
*
* @param filename the file name to look up
* @return the path to a file in the test data configuration
*/
protected static String getTestDataPath(String filename) {
for (int i = 0; i < m_testDataPath.size(); i++) {
String path = m_testDataPath.get(i);
File file = new File(path + filename);
if (file.exists()) {
if (file.isDirectory()) {
return CmsFileUtil.normalizePath(file.getAbsolutePath() + File.separator);
} else {
return CmsFileUtil.normalizePath(file.getAbsolutePath());
}
}
}
return null;
}
/**
* Imports a resource into the Cms.<p>
*
* @param cms an initialized CmsObject
* @param importFile the name (absolute Path) of the import resource (zip or folder)
* @param targetPath the name (absolute Path) of the target folder in the VFS
* @throws CmsException if something goes wrong
*/
protected static void importResources(CmsObject cms, String importFile, String targetPath) throws CmsException {
OpenCms.getImportExportManager().importData(
cms,
new CmsShellReport(cms.getRequestContext().getLocale()),
new CmsImportParameters(
getTestDataPath(File.separator + "imports" + File.separator + importFile),
targetPath,
true));
}
/**
* Imports a resource from the RFS test directories to the VFS.<p>
*
* The imported resource will be automatically unlocked.<p>
*
* @param cms the current users OpenCms context
     * @param rfsPath the RFS path of the resource to import, must be a path accessible by the current class loader
* @param vfsPath the VFS path for the imported resource
* @param type the type for the imported resource
* @param properties the properties for the imported resource
* @return the imported resource
*
* @throws Exception if the import fails
*/
protected static CmsResource importTestResource(
CmsObject cms,
String rfsPath,
String vfsPath,
int type,
List<CmsProperty> properties) throws Exception {
byte[] content = CmsFileUtil.readFile(rfsPath);
CmsResource result = cms.createResource(vfsPath, type, content, properties);
cms.unlockResource(vfsPath);
return result;
}
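
    /*
     * Illustrative usage sketch, assuming an initialized CmsObject "cms"; the VFS target path
     * below is an assumption. It imports a single text resource from the test classpath and
     * attaches a Title property to it:
     *
     *     List<CmsProperty> props = new ArrayList<CmsProperty>();
     *     props.add(new CmsProperty(CmsPropertyDefinition.PROPERTY_TITLE, "Test file", null));
     *     importTestResource(
     *         cms,
     *         "org/opencms/search/extractors/test1.html",
     *         "/imported-test1.html",
     *         CmsResourceTypePlain.getStaticTypeId(),
     *         props);
     */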
/**
* Removes the OpenCms database test instance.<p>
*/
protected static void removeDatabase() {
if (m_defaultConnection != null) {
removeDatabase(m_setupConnection, m_defaultConnection, false);
}
if (m_additionalConnection != null) {
removeDatabase(m_setupConnection, m_additionalConnection, false);
}
}
/**
* Removes the OpenCms database test instance.<p>
*
* @param setupConnection the setup connection
* @param defaultConnection the default connection
* @param handleErrors flag to indicate if errors should be handled/checked
*/
protected static void removeDatabase(
ConnectionData setupConnection,
ConnectionData defaultConnection,
boolean handleErrors) {
CmsSetupDb setupDb = null;
boolean noErrors = true;
try {
setupDb = getSetupDb(defaultConnection);
setupDb.dropTables(m_dbProduct, getReplacer(defaultConnection), handleErrors);
noErrors = setupDb.noErrors();
} catch (Exception e) {
noErrors = false;
} finally {
if (setupDb != null) {
setupDb.closeConnection();
}
}
if (!handleErrors || noErrors) {
try {
setupDb = getSetupDb(setupConnection);
setupDb.dropDatabase(m_dbProduct, getReplacer(defaultConnection), handleErrors);
setupDb.closeConnection();
} catch (Exception e) {
noErrors = false;
} finally {
if (setupDb != null) {
setupDb.closeConnection();
}
}
}
if (handleErrors) {
checkErrors(setupDb);
}
}
/**
* Creates a new OpenCms test database including the tables.<p>
*
* Any existing instance of the test database is forcefully removed first.<p>
*/
protected static void setupDatabase() {
if (m_defaultConnection != null) {
setupDatabase(m_setupConnection, m_defaultConnection, true);
}
if (m_additionalConnection != null) {
setupDatabase(m_setupConnection, m_additionalConnection, true);
}
}
/**
* Creates a new OpenCms test database including the tables.<p>
*
* @param setupConnection the setup connection
* @param defaultConnection the default connection
* @param handleErrors flag to indicate if errors should be handled/checked
*/
protected static void setupDatabase(
ConnectionData setupConnection,
ConnectionData defaultConnection,
boolean handleErrors) {
CmsSetupDb setupDb = null;
boolean noErrors = true;
try {
setupDb = getSetupDb(setupConnection);
setupDb.createDatabase(m_dbProduct, getReplacer(defaultConnection), handleErrors);
noErrors = setupDb.noErrors();
setupDb.closeConnection();
} catch (Exception e) {
noErrors = false;
} finally {
if (setupDb != null) {
setupDb.closeConnection();
}
}
if (!handleErrors || noErrors) {
try {
setupDb = getSetupDb(defaultConnection);
setupDb.createTables(m_dbProduct, getReplacer(defaultConnection), handleErrors);
noErrors = setupDb.noErrors();
setupDb.closeConnection();
} catch (Exception e) {
noErrors = false;
} finally {
if (setupDb != null) {
setupDb.closeConnection();
}
}
}
if (noErrors) {
return;
} else if (handleErrors) {
removeDatabase(setupConnection, defaultConnection, false);
setupDatabase(setupConnection, defaultConnection, false);
} else {
checkErrors(setupDb);
}
}
/**
* Compares two lists of CmsProperty objects and creates a list of all properties which are
     * not included in a separate exclude list.
     * @param cms the CmsObject
     * @param resourceName the name of the resource the properties belong to
     * @param storedResource the stored resource corresponding to the resourcename
     * @param excludeList the list of properties to exclude in the test or null
     * @return string of non-matching properties
* @throws CmsException if something goes wrong
*/
private static String compareProperties(
CmsObject cms,
String resourceName,
OpenCmsTestResourceStorageEntry storedResource,
List<CmsProperty> excludeList) throws CmsException {
String noMatches = "";
List<CmsProperty> storedProperties = storedResource.getProperties();
List<CmsProperty> properties = cms.readPropertyObjects(resourceName, false);
List<CmsProperty> unmatchedProperties;
unmatchedProperties = OpenCmsTestResourceFilter.compareProperties(storedProperties, properties, excludeList);
if (unmatchedProperties.size() > 0) {
noMatches += "[Properies missing " + unmatchedProperties.toString() + "]\n";
}
unmatchedProperties = OpenCmsTestResourceFilter.compareProperties(properties, storedProperties, excludeList);
if (unmatchedProperties.size() > 0) {
noMatches += "[Properies additional " + unmatchedProperties.toString() + "]\n";
}
return noMatches;
}
/**
* Copies the configuration files from the given folder to the "config" folder.
*
* @param newConfig the folder with the configuration files to copy
*/
private static void copyConfiguration(String newConfig) {
File configDir = new File(getTestDataPath("WEB-INF" + File.separatorChar + CmsSystemInfo.FOLDER_CONFIG_DEFAULT));
File configOriDir = new File(newConfig);
FileFilter filter = FileFilterUtils.orFileFilter(
FileFilterUtils.suffixFileFilter(".xml"),
FileFilterUtils.suffixFileFilter(".properties"));
if (configOriDir.exists()) {
File[] oriFiles = configOriDir.listFiles(filter);
boolean initConfigDates = false;
if (m_dateConfigFiles == null) {
m_dateConfigFiles = new long[oriFiles.length];
initConfigDates = true;
}
for (int i = 0; i < oriFiles.length; i++) {
File source = oriFiles[i];
if (source.isFile()) {
// only copy files
String sourceName = source.getAbsolutePath();
File target = new File(configDir, source.getName());
if (initConfigDates) {
m_dateConfigFiles[i] = target.lastModified();
}
String targetName = target.getAbsolutePath();
try {
CmsFileUtil.copy(sourceName, targetName);
target.setLastModified(m_dateConfigFiles[i]);
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}
/**
* Compares an access control entry of a resource with a given access control entry.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param ace the access control entry to compare or null if to compare with the stored values
*/
public void assertAce(CmsObject cms, String resourceName, CmsAccessControlEntry ace) {
try {
// create the exclude list
List<CmsAccessControlEntry> excludeList = new ArrayList<CmsAccessControlEntry>();
if (ace != null) {
excludeList.add(ace);
}
// get the stored resource
OpenCmsTestResourceStorageEntry storedResource = m_currentResourceStrorage.get(resourceName);
String noMatches = compareAccessEntries(cms, resourceName, storedResource, excludeList);
// now see if we have collected any no-matches
if (noMatches.length() > 0) {
fail("error comparing ace of resource " + resourceName + " with stored values: " + noMatches);
}
if (ace != null) {
List<CmsAccessControlEntry> resAces = cms.getAccessControlEntries(resourceName);
boolean notFound = true;
Iterator<CmsAccessControlEntry> i = resAces.iterator();
while (i.hasNext()) {
CmsAccessControlEntry resAce = i.next();
if (resAce.getPrincipal().equals(ace.getPrincipal())
&& (resAce.getResource().equals(ace.getResource()))) {
notFound = false;
if (!resAce.equals(ace)) {
fail("[ACE " + ace + " != " + resAce + "]");
}
}
}
if (notFound) {
fail("[ACE not found" + ace + "]");
}
}
} catch (Exception e) {
e.printStackTrace();
fail("cannot read resource " + resourceName + " " + e.getMessage());
}
}
/**
* Compares an access control list of a resource with a given access control permission.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param principal the principal of the permission set or null if to compare with the stored values
* @param permission the permission set to compare
*/
public void assertAcl(CmsObject cms, String resourceName, CmsUUID principal, CmsPermissionSet permission) {
try {
// create the exclude list
List<CmsUUID> excludeList = new ArrayList<CmsUUID>();
if (permission != null) {
excludeList.add(principal);
}
// get the stored resource
OpenCmsTestResourceStorageEntry storedResource = m_currentResourceStrorage.get(resourceName);
String noMatches = compareAccessLists(cms, resourceName, storedResource, excludeList);
// now see if we have collected any no-matches
if (noMatches.length() > 0) {
fail("error comparing permission sets of resource "
+ resourceName
+ " with stored values: "
+ noMatches);
}
if (permission != null) {
CmsAccessControlList resAcls = cms.getAccessControlList(resourceName);
Map<CmsUUID, CmsPermissionSetCustom> permissionMap = resAcls.getPermissionMap();
CmsPermissionSet resPermission = permissionMap.get(principal);
if (resPermission != null) {
if (!resPermission.equals(permission)) {
fail("[Permission set not equal " + principal + ":" + permission + " != " + resPermission + "]");
}
} else {
fail("[Permission set not found " + principal + ":" + permission + "]");
}
}
} catch (Exception e) {
fail("cannot read resource " + resourceName + " " + e.getMessage());
}
}
/**
* Compares an access control list of a resource with a given access control permission.<p>
*
* @param cms the CmsObject
* @param modifiedResource the name of the resource which had its permissions changed
* @param resourceName the name of the resource to compare
* @param principal the principal of the permission set, or null to compare with the stored values
* @param permission the permission set to compare
*/
public void assertAcl(
CmsObject cms,
String modifiedResource,
String resourceName,
CmsUUID principal,
CmsPermissionSet permission) {
try {
// create the exclude list
List<CmsUUID> excludeList = new ArrayList<CmsUUID>();
if (permission != null) {
excludeList.add(principal);
}
// TODO: This is the code to recalculate the permission set if necessary. It's not completed yet!
Map<CmsUUID, String> parents = getParents(cms, resourceName);
List<CmsAccessControlEntry> aceList = cms.getAccessControlEntries(resourceName);
Iterator<CmsAccessControlEntry> i = aceList.iterator();
while (i.hasNext()) {
CmsAccessControlEntry ace = i.next();
if (ace.getPrincipal().equals(principal)) {
String parent = parents.get(ace.getResource());
if ((!parent.equals(modifiedResource)) && (parent.length() > modifiedResource.length())) {
permission = new CmsPermissionSet(ace.getAllowedPermissions(), ace.getDeniedPermissions());
}
}
}
// get the stored resource
OpenCmsTestResourceStorageEntry storedResource = m_currentResourceStrorage.get(resourceName);
String noMatches = compareAccessLists(cms, resourceName, storedResource, excludeList);
// now see if we have collected any no-matches
if (noMatches.length() > 0) {
fail("error comparing permission sets of resource "
+ resourceName
+ " with stored values: "
+ noMatches);
}
if (permission != null) {
CmsAccessControlList resAcls = cms.getAccessControlList(resourceName);
Map<CmsUUID, CmsPermissionSetCustom> permissionMap = resAcls.getPermissionMap();
CmsPermissionSet resPermission = permissionMap.get(principal);
if (resPermission != null) {
if (!resPermission.equals(permission)) {
fail("[Permission set not equal " + principal + ":" + permission + " != " + resPermission + "]");
}
} else {
fail("[Permission set not found " + principal + ":" + permission + "]");
}
}
} catch (Exception e) {
fail("cannot read resource " + resourceName + " " + e.getMessage());
}
}
/**
* Tests if a pattern can be found in a content string.<p>
* Fails if the pattern is not found.
*
* @param content the content string
* @param pattern the pattern to search for
*/
public void assertContains(String content, String pattern) {
if (content.toLowerCase().indexOf(pattern.toLowerCase()) == -1) {
fail("pattern '" + pattern + "' not found in content");
}
}
/**
* Tests if a pattern cannot be found in a content string.<p>
* Fails if the pattern is found.
*
* @param content the content string
* @param pattern the pattern to search for
*/
public void assertContainsNot(String content, String pattern) {
if (content.toLowerCase().indexOf(pattern.toLowerCase()) != -1) {
fail("pattern '" + pattern + "' found in content");
}
}
/**
* Compares the current content of a (file) resource with a given content.<p>
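*
* <p>Illustrative usage (sketch; the resource path is hypothetical):</p>
* <pre>
* byte[] expected = cms.readFile("/folder1/page1.html", CmsResourceFilter.ALL).getContents();
* // ... perform an operation that must not alter the file content ...
* assertContent(cms, "/folder1/page1.html", expected);
* </pre>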
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param content the content to compare
*/
public void assertContent(CmsObject cms, String resourceName, byte[] content) {
try {
// get the actual resource from the vfs
CmsFile file = cms.readFile(resourceName, CmsResourceFilter.ALL);
byte[] fileContent = file.getContents();
if (fileContent.length != file.getLength()) {
fail("[Content length stored " + file.getContents().length + " != " + file.getLength() + "]");
}
if (fileContent.length != content.length) {
fail("[Content length compared " + file.getContents().length + " != " + content.length + "]");
}
for (int i = 0; i < content.length; i++) {
if (fileContent[i] != content[i]) {
fail("[Content compare failed at index " + i + "]");
}
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Tests if the current content date of a resource is equal to the given date.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param dateContent the content date
*/
public void assertDateContent(CmsObject cms, String resourceName, long dateContent) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (res.getDateContent() != dateContent) {
fail("[DateContent "
+ dateContent
+ " i.e. "
+ CmsDateUtil.getHeaderDate(dateContent)
+ " != "
+ res.getDateContent()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateContent())
+ "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Tests if the current date content of a resource is later than the given date.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param dateContent the content date
*/
public void assertDateContentAfter(CmsObject cms, String resourceName, long dateContent) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (res.getDateContent() < dateContent) {
fail("[DateContent "
+ dateContent
+ " i.e. "
+ CmsDateUtil.getHeaderDate(dateContent)
+ " > "
+ res.getDateContent()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateContent())
+ "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Compares the current date created of a resource with a given date.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param dateCreated the creation date
*/
public void assertDateCreated(CmsObject cms, String resourceName, long dateCreated) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (res.getDateCreated() != dateCreated) {
fail("[DateCreated "
+ dateCreated
+ " i.e. "
+ CmsDateUtil.getHeaderDate(dateCreated)
+ " != "
+ res.getDateCreated()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateCreated())
+ "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Tests if the creation date of a resource is later than a given date.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param dateCreated the creation date
*/
public void assertDateCreatedAfter(CmsObject cms, String resourceName, long dateCreated) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (res.getDateCreated() < dateCreated) {
fail("[DateCreated "
+ dateCreated
+ " i.e. "
+ CmsDateUtil.getHeaderDate(dateCreated)
+ " > "
+ res.getDateCreated()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateCreated())
+ "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Compares the current expiration date of a resource with a given date.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param dateExpired the expiration date
*/
public void assertDateExpired(CmsObject cms, String resourceName, long dateExpired) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (res.getDateExpired() != dateExpired) {
fail("[DateExpired "
+ dateExpired
+ " i.e. "
+ CmsDateUtil.getHeaderDate(dateExpired)
+ " != "
+ res.getDateExpired()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateExpired())
+ "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Compares the current date last modified of a resource with a given date.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param dateLastModified the last modification date
*/
public void assertDateLastModified(CmsObject cms, String resourceName, long dateLastModified) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (res.getDateLastModified() != dateLastModified) {
fail("[DateLastModified "
+ dateLastModified
+ " i.e. "
+ CmsDateUtil.getHeaderDate(dateLastModified)
+ " != "
+ res.getDateLastModified()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateLastModified())
+ "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Tests if the current date last modified of a resource is later than the given date.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param dateLastModified the last modification date
*/
public void assertDateLastModifiedAfter(CmsObject cms, String resourceName, long dateLastModified) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (res.getDateLastModified() < dateLastModified) {
fail("[DateLastModified "
+ dateLastModified
+ " i.e. "
+ CmsDateUtil.getHeaderDate(dateLastModified)
+ " > "
+ res.getDateLastModified()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateLastModified())
+ "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Compares the current release date of a resource with a given date.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param dateReleased the release date
*/
public void assertDateReleased(CmsObject cms, String resourceName, long dateReleased) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (res.getDateReleased() != dateReleased) {
fail("[DateReleased "
+ dateReleased
+ " i.e. "
+ CmsDateUtil.getHeaderDate(dateReleased)
+ " != "
+ res.getDateReleased()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateReleased())
+ "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Tests if the given exceptions are equal (or both null).<p>
*
* @param e1 first exception to compare
* @param e2 second exception to compare
*/
public void assertEquals(CmsException e1, CmsException e2) {
if ((e1 == null) && (e2 == null)) {
return;
}
if (((e1 == null) && (e2 != null)) || ((e1 != null) && (e2 == null))) {
fail("Exceptions not equal (not both null)");
}
if ((e1 != null) && (e2 != null)) {
if (!(e1.getClass().equals(e2.getClass()))) {
fail("Exception " + e1.toString() + " does not equal " + e2.toString());
}
if (!(e1.getMessageContainer().getKey().equals(e2.getMessageContainer().getKey()))) {
fail("Exception " + e1.toString() + " does not equal " + e2.toString());
}
}
}
/**
* Tests if the given jobs are internally equal.<p>
* (May have different wrapper classes)
*
* @param j1 first job to compare
* @param j2 second job to compare
* @param comparePublishLists if the publish lists should be compared, too
* @param compareTime if the timestamps should be compared, too
*/
public void assertEquals(
CmsPublishJobBase j1,
CmsPublishJobBase j2,
boolean comparePublishLists,
boolean compareTime) {
CmsPublishJobInfoBean job1 = new OpenCmsTestPublishJobBase(j1).getInfoBean();
CmsPublishJobInfoBean job2 = new OpenCmsTestPublishJobBase(j2).getInfoBean();
if (!(job1.getPublishHistoryId().equals(job2.getPublishHistoryId())
&& job1.getProjectName().equals(job2.getProjectName())
&& job1.getUserId().equals(job2.getUserId())
&& job1.getLocale().equals(job2.getLocale())
&& (job1.getFlags() == job2.getFlags()) && (job1.getSize() == job2.getSize()))) {
fail("Publish jobs are not equal");
}
if (compareTime) {
if (!((job1.getEnqueueTime() == job2.getEnqueueTime()) && (job1.getStartTime() == job2.getStartTime()) && (job1.getFinishTime() == job2.getFinishTime()))) {
fail("Publish jobs do not have the same timestamps");
}
}
if (comparePublishLists) {
if (!job1.getPublishList().toString().equals(job2.getPublishList().toString())) {
fail("Publish jobs do not have the same publish list");
}
}
}
/**
* Tests if the given xml document objects are equal (or both null).<p>
*
* @param d1 first document to compare
* @param d2 second document to compare
*/
public void assertEquals(Document d1, Document d2) {
if ((d1 == null) && (d2 == null)) {
return;
}
if (((d1 == null) && (d2 != null)) || ((d1 != null) && (d2 == null))) {
fail("Documents not equal (not both null)");
}
if ((d1 != null) && (d2 != null)) {
InternalNodeComparator comparator = new InternalNodeComparator();
if (comparator.compare((Node)d1, (Node)d2) != 0) {
fail("Comparison of documents failed: "
+ "name = "
+ d1.getName()
+ ", "
+ "path = "
+ comparator.m_node1.getUniquePath()
+ "\nNode 1:"
+ comparator.m_node1.asXML()
+ "\nNode 2:"
+ comparator.m_node2.asXML());
}
}
}
/**
* Compares a given resource to its stored version containing the state before a CmsObject
* method was called.<p>
*
* @param cms the CmsObject
* @param resource the resource to compare
* @param filter the filter containing the flags defining which attributes to compare
*/
public void assertFilter(CmsObject cms, CmsResource resource, OpenCmsTestResourceFilter filter) {
try {
// get the stored resource
OpenCmsTestResourceStorageEntry storedResource = m_currentResourceStrorage.get(cms.getSitePath(resource));
// compare the current resource with the stored resource
assertFilter(cms, storedResource, resource, filter);
} catch (Exception e) {
fail("cannot read resource " + cms.getSitePath(resource) + " " + e.getMessage());
}
}
/**
* Compares a stored Cms resource with another Cms resource instance using a specified filter.<p>
*
* @param cms the current user's Cms object
* @param storedResource a stored Cms resource representing the state before an operation
* @param res a Cms resource representing the state after an operation
* @param filter a filter to compare both resources
*/
public void assertFilter(
CmsObject cms,
OpenCmsTestResourceStorageEntry storedResource,
CmsResource res,
OpenCmsTestResourceFilter filter) {
String noMatches = null;
String resourceName = null;
try {
noMatches = "";
resourceName = cms.getRequestContext().removeSiteRoot(res.getRootPath());
// compare the contents if necessary
if (filter.testContents()) {
byte[] contents;
// we only have to do this when comparing files
if (res.isFile()) {
contents = cms.readFile(resourceName, CmsResourceFilter.ALL).getContents();
if (!new String(storedResource.getContents()).equals(new String(contents))) {
noMatches += "[Content does not match]\n";
}
contents = null;
}
}
// compare the date content if necessary
if (filter.testDateContent()) {
if (storedResource.getDateContent() != res.getDateContent()) {
noMatches += "[DateContent "
+ storedResource.getDateContent()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(storedResource.getDateContent())
+ " != "
+ res.getDateContent()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateContent())
+ "]\n";
}
}
// compare the date created if necessary
if (filter.testDateCreated()) {
if (storedResource.getDateCreated() != res.getDateCreated()) {
noMatches += "[DateCreated "
+ storedResource.getDateCreated()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(storedResource.getDateCreated())
+ " != "
+ res.getDateCreated()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateCreated())
+ "]\n";
}
}
if (filter.testDateCreatedSec()) {
if ((storedResource.getDateCreated() / 1000) != (res.getDateCreated() / 1000)) {
noMatches += "[DateCreated "
+ storedResource.getDateCreated()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(storedResource.getDateCreated())
+ " != "
+ res.getDateCreated()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateCreated())
+ "]\n";
}
}
// compare the date expired if necessary
if (filter.testDateExpired()) {
if (storedResource.getDateExpired() != res.getDateExpired()) {
noMatches += "[DateExpired "
+ storedResource.getDateExpired()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(storedResource.getDateExpired())
+ " != "
+ res.getDateExpired()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateExpired())
+ "]\n";
}
}
// compare the date last modified if necessary
if (filter.testDateLastModified()) {
if (storedResource.getDateLastModified() != res.getDateLastModified()) {
noMatches += "[DateLastModified "
+ storedResource.getDateLastModified()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(storedResource.getDateLastModified())
+ " != "
+ res.getDateLastModified()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateLastModified())
+ "]\n";
}
}
if (filter.testDateLastModifiedSec()) {
if ((storedResource.getDateLastModified() / 1000) != (res.getDateLastModified() / 1000)) {
noMatches += "[DateLastModified "
+ storedResource.getDateLastModified()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(storedResource.getDateLastModified())
+ " != "
+ res.getDateLastModified()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateLastModified())
+ "]\n";
}
}
// compare the date last released if necessary
if (filter.testDateReleased()) {
if (storedResource.getDateReleased() != res.getDateReleased()) {
noMatches += "[DateReleased "
+ storedResource.getDateReleased()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(storedResource.getDateReleased())
+ " != "
+ res.getDateReleased()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateReleased())
+ "]\n";
}
}
// compare the flags if necessary
if (filter.testFlags()) {
if (storedResource.getFlags() != res.getFlags()) {
noMatches += "[Flags " + storedResource.getFlags() + " != " + res.getFlags() + "]\n";
}
}
// compare the length if necessary
if (filter.testLength()) {
if (storedResource.getLength() != res.getLength()) {
noMatches += "[Length " + storedResource.getLength() + " != " + res.getLength() + "]\n";
}
}
// compare the sibling count if necessary
if (filter.testSiblingCount()) {
if (storedResource.getSiblingCount() != res.getSiblingCount()) {
noMatches += "[SiblingCount "
+ storedResource.getSiblingCount()
+ " != "
+ res.getSiblingCount()
+ "]\n";
}
}
// compare the lockstate if necessary
if (filter.testLock()) {
CmsLock resLock = cms.getLock(res);
if (filter.testName()) {
if (!storedResource.getLock().equals(resLock)) {
noMatches += "[Lockstate " + storedResource.getLock() + " != " + resLock + "]\n";
}
} else {
CmsLock other = storedResource.getLock();
if (!other.getUserId().equals(resLock.getUserId())
|| !other.getProjectId().equals(resLock.getProjectId())
|| !other.getType().equals(resLock.getType())) {
noMatches += "[Lockstate " + storedResource.getLock() + " != " + resLock + "]\n";
}
}
}
// compare the name if necessary
if (filter.testName()) {
if (!storedResource.getName().equals(res.getName())) {
noMatches += "[Name " + storedResource.getName() + " != " + res.getName() + "]\n";
}
}
// compare the project last modified if necessary
if (filter.testProjectLastModified()) {
if (!storedResource.getProjectLastModified().equals(res.getProjectLastModified())) {
noMatches += "[ProjectLastModified "
+ storedResource.getProjectLastModified()
+ " != "
+ res.getProjectLastModified()
+ "]\n";
}
}
// compare the properties if necessary
if (filter.testProperties()) {
noMatches += compareProperties(cms, resourceName, storedResource, null);
}
// compare the acl if necessary
if (filter.testAcl()) {
// compare the ACLs
noMatches += compareAccessLists(cms, resourceName, storedResource, null);
}
// compare the ace if necessary
if (filter.testAce()) {
// compare the ACEs
noMatches += compareAccessEntries(cms, resourceName, storedResource, null);
}
// compare the resource id if necessary
if (filter.testResourceId()) {
if (!storedResource.getResourceId().equals(res.getResourceId())) {
noMatches += "[ResourceId " + storedResource.getResourceId() + " != " + res.getResourceId() + "]\n";
}
}
// compare the state if necessary
if (filter.testState()) {
if (!storedResource.getState().equals(res.getState())) {
noMatches += "[State " + storedResource.getState() + " != " + res.getState() + "]\n";
}
}
// compare the structure id if necessary
if (filter.testStructureId()) {
if (!storedResource.getStructureId().equals(res.getStructureId())) {
noMatches += "[StructureId "
+ storedResource.getStructureId()
+ " != "
+ res.getStructureId()
+ "]\n";
}
}
// compare the touched flag if necessary
if (filter.testTouched()) {
if (storedResource.isTouched() != res.isTouched()) {
noMatches += "[Touched " + storedResource.isTouched() + " != " + res.isTouched() + "]\n";
}
}
// compare the type if necessary
if (filter.testType()) {
if (storedResource.getType() != res.getTypeId()) {
noMatches += "[Type " + storedResource.getType() + " != " + res.getTypeId() + "]\n";
}
}
// compare the user created if necessary
if (filter.testUserCreated()) {
if (!storedResource.getUserCreated().equals(res.getUserCreated())) {
noMatches += createUserFailMessage(
cms,
"UserCreated",
storedResource.getUserCreated(),
res.getUserCreated());
noMatches += "\n";
}
}
// compare the user last modified if necessary
if (filter.testUserLastModified()) {
if (!storedResource.getUserLastModified().equals(res.getUserLastModified())) {
noMatches += createUserFailMessage(
cms,
"UserLastModified",
storedResource.getUserLastModified(),
res.getUserLastModified());
noMatches += "\n";
}
}
// now see if we have collected any no-matches
if (noMatches.length() > 0) {
fail("error comparing resource " + resourceName + " with stored values:\n" + noMatches);
}
} catch (CmsException e) {
fail("cannot assert filter " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Compares a resource to its stored version containing the state before a CmsObject
* method was called.<p>
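*
* <p>Illustrative usage (sketch; the resource path is hypothetical and the filter constant is only
* a placeholder for a suitable constant defined in {@link OpenCmsTestResourceFilter}):</p>
* <pre>
* storeResources(cms, "/folder1/page1.html");
* // ... perform the operation under test ...
* assertFilter(cms, "/folder1/page1.html", OpenCmsTestResourceFilter.FILTER_EQUAL);
* </pre>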
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param filter the filter containing the flags defining which attributes to compare
*
* @throws CmsException if something goes wrong
*/
public void assertFilter(CmsObject cms, String resourceName, OpenCmsTestResourceFilter filter) throws CmsException {
// get the stored resource
OpenCmsTestResourceStorageEntry storedResource = null;
try {
storedResource = m_currentResourceStrorage.get(resourceName);
} catch (Exception e) {
fail(e.getMessage());
}
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
// compare the current resource with the stored resource
assertFilter(cms, storedResource, res, filter);
}
/**
* Compares a resource to another given resource using a specified filter.<p>
*
* @param cms the current user's Cms object
* @param resourceName1 resource #1
* @param resourceName2 resource #2
* @param filter the filter containing the flags defining which attributes to compare
*/
public void assertFilter(CmsObject cms, String resourceName1, String resourceName2, OpenCmsTestResourceFilter filter) {
try {
CmsResource res1 = cms.readResource(resourceName1, CmsResourceFilter.ALL);
CmsResource res2 = cms.readResource(resourceName2, CmsResourceFilter.ALL);
// a dummy storage entry gets created here to share existing code
OpenCmsTestResourceStorageEntry dummy = new OpenCmsTestResourceStorageEntry(cms, resourceName2, res2);
assertFilter(cms, dummy, res1, filter);
} catch (CmsException e) {
fail("cannot read either resource "
+ resourceName1
+ " or resource "
+ resourceName2
+ " "
+ CmsException.getStackTraceAsString(e));
}
}
/**
* Tests whether a resource currently has a specified flag set.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param flag a flag to check
*/
public void assertFlags(CmsObject cms, String resourceName, int flag) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
// test if the specified flag is set
if (!((res.getFlags() & flag) > 0)) {
fail("[Flags (" + res.getFlags() + ") do not contain flag (" + flag + ")");
}
} catch (CmsException e) {
fail("Error reading resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Checks if the given resource has the correct history count, and also
* checks if all entries in the history can be read.<p>
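*
* <p>Illustrative usage (sketch; the path and the expected version count are hypothetical):</p>
* <pre>
* // after a resource has been changed and published twice, two versions are expected
* assertHistory(cms, "/folder1/page1.html", 2);
* </pre>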
*
* @param cms the current user OpenCms context
* @param resourcename the name of the resource to check the history for
* @param versionCount the expected version number of the resource
*
* @throws Exception if the test fails
*/
public void assertHistory(CmsObject cms, String resourcename, int versionCount) throws Exception {
CmsResource res = cms.readResource(resourcename, CmsResourceFilter.ALL);
// assert we have the right version number
assertEquals(versionCount, res.getVersion());
if (cms.getRequestContext().getCurrentProject().isOnlineProject()) {
// no additional test possible for the online project
return;
}
// read all available versions
List<I_CmsHistoryResource> versions = cms.readAllAvailableVersions(resourcename);
// new files have no historical entry even though the version number may be greater than 1 for siblings
if (res.getState().isNew()) {
assertTrue(versions.isEmpty());
return;
}
// if the resource has not been published yet, the available versions will be one less
boolean unchanged = res.getState().isUnchanged();
// the list is sorted in descending order, i.e. the last version is first in the list
int count = versionCount - (unchanged ? 0 : 1);
Iterator<I_CmsHistoryResource> i = versions.iterator();
while (i.hasNext()) {
// walk through the list and read all version files
CmsResource hRes = (CmsResource)i.next();
if (hRes instanceof CmsHistoryFile) {
CmsFile hFile = cms.readFile(hRes);
assertEquals(count, hFile.getVersion());
} else {
assertEquals(count, hRes.getVersion());
}
count--;
}
// finally assert that the list size is equal to the expected number of history versions
assertEquals(versionCount - (unchanged ? 0 : 1), versions.size());
}
/**
* Checks if the given resource has the correct history count, and also
* checks if all entries in the history can be read.<p>
*
* Use this method only for resources that have been restored.<p>
*
* @param cms the current user OpenCms context
* @param resourcename the name of the resource to check the history for
* @param versionCount the expected version number of the resource
*
* @throws Exception if the test fails
*/
public void assertHistoryForRestored(CmsObject cms, String resourcename, int versionCount) throws Exception {
CmsResource res = cms.readResource(resourcename, CmsResourceFilter.ALL);
// assert we have the right version number
assertEquals(versionCount, res.getVersion());
if (cms.getRequestContext().getCurrentProject().isOnlineProject()) {
// no additional test possible for the online project
return;
}
// read all available versions
List<I_CmsHistoryResource> versions = cms.readAllAvailableVersions(resourcename);
// if the resource has not been published yet, the available versions will be one less
boolean unchanged = res.getState().isUnchanged();
// the list is sorted in descending order, i.e. the last version is first in the list
int count = versionCount - (unchanged ? 0 : 1);
Iterator<I_CmsHistoryResource> i = versions.iterator();
while (i.hasNext()) {
// walk through the list and read all version files
CmsResource hRes = (CmsResource)i.next();
CmsFile hFile = cms.readFile(hRes);
assertEquals(count, hFile.getVersion());
count--;
}
// finally assert that the list size is equal to the expected number of history versions
assertEquals(versionCount - (unchanged ? 0 : 1), versions.size());
}
/**
* Ensures that the given resource is a folder.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to check for a folder
*/
public void assertIsFolder(CmsObject cms, String resourceName) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (!res.isFolder()) {
fail("[Not a folder: " + resourceName + "]");
}
if (res.getLength() != -1) {
fail("[Folder length not -1: " + resourceName + "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Tests if the specified object is identical with another object.<p>
*
* @param o1 an object
* @param o2 another object
*/
public void assertIsIdentical(Object o1, Object o2) {
if (o1 != o2) {
fail("Object " + o1.toString() + " is not identical to " + o2.toString());
}
}
/**
* Tests if the specified object is not identical with another object.<p>
*
* @param o1 an object
* @param o2 another object
*/
public void assertIsNotIdentical(Object o1, Object o2) {
if (o1 == o2) {
fail("Object " + o1.toString() + " is identical to " + o2.toString());
}
}
/**
* Validates if a specified resource is somehow locked to the current user.<p>
*
* @param cms the current user's Cms object
* @param resourceName the name of the resource to validate
*/
public void assertLock(CmsObject cms, String resourceName) {
try {
// get the actual resource from the VFS
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
CmsLock lock = cms.getLock(res);
if (lock.isNullLock() || !lock.isOwnedBy(cms.getRequestContext().getCurrentUser())) {
fail("[Lock "
+ resourceName
+ " requires must be locked to user "
+ cms.getRequestContext().getCurrentUser().getId()
+ "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Validates if a specified resource has a lock of a given type for the current user.<p>
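*
* <p>Illustrative usage (sketch; the resource path is hypothetical and the lock type is assumed
* to be one of the {@link CmsLockType} constants, e.g. an exclusive lock):</p>
* <pre>
* cms.lockResource("/folder1/page1.html");
* assertLock(cms, "/folder1/page1.html", CmsLockType.EXCLUSIVE);
* </pre>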
*
* @param cms the current user's Cms object
* @param resourceName the name of the resource to validate
* @param lockType the type of the lock
*
* @see CmsLockType
*/
public void assertLock(CmsObject cms, String resourceName, CmsLockType lockType) {
assertLock(cms, resourceName, lockType, cms.getRequestContext().getCurrentUser());
}
/**
* Validates if a specified resource has a lock of a given type and is locked for a principal.<p>
*
* @param cms the current user's Cms object
* @param resourceName the name of the resource to validate
* @param lockType the type of the lock
* @param user the user to check the lock with
*
* @see CmsLockType
*/
public void assertLock(CmsObject cms, String resourceName, CmsLockType lockType, CmsUser user) {
try {
// get the actual resource from the VFS
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
CmsLock lock = cms.getLock(res);
if (lockType.isUnlocked()) {
if (!lock.isNullLock()) {
fail("[Lock " + resourceName + " must be unlocked]");
}
} else if (lock.isNullLock() || (lock.getType() != lockType) || !lock.isOwnedBy(user)) {
fail("[Lock "
+ resourceName
+ " requires a lock of type "
+ lockType
+ " for user "
+ user.getId()
+ " ("
+ user.getName()
+ ") but has a lock of type "
+ lock.getType()
+ " for user "
+ lock.getUserId()
+ "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Validates the project status of a resource,
* i.e. if a resource has a "red flag" or not.<p>
*
* @param cms the current user's Cms object
* @param resourceName the name of the resource to validate
* @param shouldHaveRedFlag true, if the resource should currently have a red flag
*/
public void assertModifiedInCurrentProject(CmsObject cms, String resourceName, boolean shouldHaveRedFlag) {
boolean hasRedFlag = false;
try {
// get the actual resource from the VFS
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
// the current resource has a red flag if its state is changed/new/deleted
hasRedFlag = !res.getState().isUnchanged();
// and if it was modified in the current project
hasRedFlag &= (res.getProjectLastModified().equals(cms.getRequestContext().getCurrentProject().getUuid()));
// and if it was modified by the current user
hasRedFlag &= (res.getUserLastModified().equals(cms.getRequestContext().getCurrentUser().getId()));
if (shouldHaveRedFlag && !hasRedFlag) {
// it should have a red flag, but it hasn't
fail("[HasRedFlag " + resourceName + " must have a red flag]");
} else if (hasRedFlag && !shouldHaveRedFlag) {
// it has a red flag, but it shouldn't
fail("[HasRedFlag " + resourceName + " must not have a red flag]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Asserts the given permission string with the access control entry for the given resource and principal.<p>
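*
* <p>Illustrative usage (sketch; the resource path, the group name and the permission string are
* hypothetical and only show the call pattern):</p>
* <pre>
* I_CmsPrincipal group = cms.readGroup("Users");
* assertPermissionString(cms, "/folder1/page1.html", group, "+r+v");
* </pre>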
*
* @param cms the cms object
* @param resourceName the resource name
* @param principal the principal
* @param permissionString the permission string to compare
*
* @throws CmsException if something goes wrong
*/
public void assertPermissionString(
CmsObject cms,
String resourceName,
I_CmsPrincipal principal,
String permissionString) throws CmsException {
Iterator<CmsAccessControlEntry> it = cms.getAccessControlEntries(resourceName).iterator();
while (it.hasNext()) {
CmsAccessControlEntry ace = it.next();
if (ace.getPrincipal().equals(principal.getId())) {
assertEquals(permissionString, ace.getPermissions().getPermissionString()
+ ace.getInheritingString()
+ ace.getResponsibleString());
return;
}
}
if (permissionString != null) {
fail("Ace not found");
}
}
/**
* Compares the current project of a resource with a given CmsProject.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param project the project
*/
public void assertProject(CmsObject cms, String resourceName, CmsProject project) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (!res.getProjectLastModified().equals(project.getUuid())) {
fail("[ProjectLastModified " + project.getUuid() + " != " + res.getProjectLastModified() + "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Compares the current properties of a resource with the stored values and a given, changed property.<p>
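*
* <p>Illustrative usage (sketch; the path, property name and value are hypothetical, and the
* resource is assumed to be lockable by the current user):</p>
* <pre>
* storeResources(cms, "/folder1/page1.html");
* CmsProperty prop = new CmsProperty(CmsPropertyDefinition.PROPERTY_TITLE, "New title", null);
* cms.lockResource("/folder1/page1.html");
* cms.writePropertyObject("/folder1/page1.html", prop);
* assertPropertyChanged(cms, "/folder1/page1.html", prop);
* </pre>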
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param property the changed property
*/
public void assertPropertyChanged(CmsObject cms, String resourceName, CmsProperty property) {
try {
// get the stored resource
OpenCmsTestResourceStorageEntry storedResource = m_currentResourceStrorage.get(resourceName);
// create the exclude list
List<CmsProperty> excludeList = new ArrayList<CmsProperty>();
excludeList.add(property);
String noMatches = compareProperties(cms, resourceName, storedResource, excludeList);
// now see if we have collected any no-matches
if (noMatches.length() > 0) {
fail("error comparing resource " + resourceName + " with stored values: " + noMatches);
}
// test if the property was already in the stored result
List<CmsProperty> storedProperties = storedResource.getProperties();
if (!storedProperties.contains(property)) {
fail("property not found in stored value: " + property);
}
// test if the value of the changed property is correct.
CmsProperty resourceProperty = cms.readPropertyObject(resourceName, property.getName(), false);
if (!resourceProperty.isIdentical(property)) {
fail("property is not identical :" + property + " != " + resourceProperty);
}
} catch (Exception e) {
fail("cannot read resource " + resourceName + " " + e.getMessage());
}
}
/**
* Compares the current properties of a resource with the stored values and a list of changed properties.<p>
*
* @param cms an initialized CmsObject
* @param resourceName the name of the resource to compare
* @param excludeList a list of CmsProperties to exclude
*/
public void assertPropertyChanged(CmsObject cms, String resourceName, List<CmsProperty> excludeList) {
try {
// get the stored resource
OpenCmsTestResourceStorageEntry storedResource = m_currentResourceStrorage.get(resourceName);
String noMatches = compareProperties(cms, resourceName, storedResource, excludeList);
// now see if we have collected any no-matches
if (noMatches.length() > 0) {
fail("error comparing resource " + resourceName + " with stored values: " + noMatches);
}
// test if the values of the changed properties are correct and if the properties
// were already in the stored result
String propertyNoMatches = "";
String storedNotFound = "";
Iterator<CmsProperty> i = excludeList.iterator();
List<CmsProperty> storedProperties = storedResource.getProperties();
while (i.hasNext()) {
CmsProperty property = i.next();
CmsProperty resourceProperty = cms.readPropertyObject(resourceName, property.getName(), false);
// test if the property has the same value
if (!resourceProperty.isIdentical(property)) {
propertyNoMatches += "[" + property + " != " + resourceProperty + "]";
}
// test if the property was already in the stored object
if (!storedProperties.contains(property)) {
storedNotFound += "[" + property + "]";
}
}
// now see if we have collected any property no-matches
if (propertyNoMatches.length() > 0) {
fail("error comparing properties for resource " + resourceName + ": " + propertyNoMatches);
}
// now see if we have collected any property not found in the stored original
if (storedNotFound.length() > 0) {
fail("properties not found in stored value: " + storedNotFound);
}
} catch (Exception e) {
fail("cannot read resource " + resourceName + " " + e.getMessage());
}
}
/**
* Tests if a property definition exists.<p>
*
* @param cms the CmsObject
* @param propertyDefinition the property definition
*/
public void assertPropertydefinitionExist(CmsObject cms, CmsPropertyDefinition propertyDefinition) {
try {
CmsPropertyDefinition prop = cms.readPropertyDefinition(propertyDefinition.getName());
if (prop != null) {
if (!prop.getName().equals(propertyDefinition.getName())) {
fail("propertsdefinitions do not match: " + prop + " != " + propertyDefinition);
}
} else {
fail("cannot read propertydefitnion" + propertyDefinition);
}
} catch (CmsException e) {
fail("cannot read propertydefitnion" + propertyDefinition + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Tests if the list of property definitions is identical to a given list, except for one excluded property definition.<p>
*
* @param cms the CmsObject
* @param propertyDefintions the list of property definitions
* @param exclude the property definition to exclude
*/
public void assertPropertydefinitions(
CmsObject cms,
List<CmsPropertyDefinition> propertyDefintions,
CmsPropertyDefinition exclude) {
try {
String noMatches = "";
List<CmsPropertyDefinition> allPropertydefintions = cms.readAllPropertyDefinitions();
noMatches += comparePropertydefintions(propertyDefintions, allPropertydefintions, exclude);
noMatches += comparePropertydefintions(allPropertydefintions, propertyDefintions, exclude);
if (noMatches.length() > 0) {
fail("missig propertydefintions: " + noMatches);
}
} catch (CmsException e) {
fail("cannot read propertydefitnions " + CmsException.getStackTraceAsString(e));
}
}
/**
* Compares the current properties of a resource with the stored values.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
*/
public void assertPropertyEqual(CmsObject cms, String resourceName) {
try {
// get the stored resource
OpenCmsTestResourceStorageEntry storedResource = m_currentResourceStrorage.get(resourceName);
String noMatches = compareProperties(cms, resourceName, storedResource, null);
// now see if we have collected any no-matches
if (noMatches.length() > 0) {
fail("error comparing resource " + resourceName + " with stored values: " + noMatches);
}
} catch (Exception e) {
fail("cannot read resource " + resourceName + " " + e.getMessage());
}
}
/**
* Compares the current properties of a resource with the stored values and a given, new property.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param property the changed property
*/
public void assertPropertyNew(CmsObject cms, String resourceName, CmsProperty property) {
try {
// get the stored resource
OpenCmsTestResourceStorageEntry storedResource = m_currentResourceStrorage.get(resourceName);
// create the exclude list
List<CmsProperty> excludeList = new ArrayList<CmsProperty>();
excludeList.add(property);
String noMatches = compareProperties(cms, resourceName, storedResource, excludeList);
// now see if we have collected any no-matches
if (noMatches.length() > 0) {
fail("error comparing resource " + resourceName + " with stored values: " + noMatches);
}
// test that the property was not already in the stored result
List<CmsProperty> storedProperties = storedResource.getProperties();
if (storedProperties.contains(property)) {
fail("property already found in stored value: " + property);
}
// test if the value of the new property is correct.
CmsProperty resourceProperty = cms.readPropertyObject(resourceName, property.getName(), false);
if (!resourceProperty.isIdentical(property)) {
fail("property is not identical :" + property + " != " + resourceProperty);
}
} catch (Exception e) {
fail("cannot read resource " + resourceName + " " + e.getMessage());
}
}
/**
* Compares the current properties of a resource with the stored values and a list of new properties.<p>
*
* @param cms an initialized CmsObject
* @param resourceName the name of the resource to compare
* @param excludeList a list of CmsProperties to exclude
*/
public void assertPropertyNew(CmsObject cms, String resourceName, List excludeList) {
try {
// get the stored resource
OpenCmsTestResourceStorageEntry storedResource = m_currentResourceStrorage.get(resourceName);
String noMatches = compareProperties(cms, resourceName, storedResource, excludeList);
// now see if we have collected any no-matches
if (noMatches.length() > 0) {
fail("error comparing resource " + resourceName + " with stored values: " + noMatches);
}
// test if the values of the new properties are correct and that the properties
// were not already in the stored result
String propertyNoMatches = "";
String storedFound = "";
Iterator i = excludeList.iterator();
List<CmsProperty> storedProperties = storedResource.getProperties();
while (i.hasNext()) {
CmsProperty property = (CmsProperty)i.next();
CmsProperty resourceProperty = cms.readPropertyObject(resourceName, property.getName(), false);
// test if the property has the same value
if (!resourceProperty.isIdentical(property)) {
propertyNoMatches += "[" + property + " != " + resourceProperty + "]";
}
// test if the property was already in the stored object
if (storedProperties.contains(property)) {
storedFound += "[" + property + "]";
}
}
// now see if we have collected any property no-matches
if (propertyNoMatches.length() > 0) {
fail("error comparing properties for resource " + resourceName + ": " + propertyNoMatches);
}
// now see if we have collected any properties that were already in the stored original
if (storedFound.length() > 0) {
fail("properties already found in stored value: " + storedFound);
}
} catch (Exception e) {
fail("cannot read resource " + resourceName + " " + e.getMessage());
}
}
/**
* Compares the current properties of a resource with the stored values and a given, deleted property.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param property the deleted property
*/
public void assertPropertyRemoved(CmsObject cms, String resourceName, CmsProperty property) {
try {
// create the exclude list
List<CmsProperty> excludeList = new ArrayList<CmsProperty>();
excludeList.add(property);
// get the stored resource
OpenCmsTestResourceStorageEntry storedResource = m_currentResourceStrorage.get(resourceName);
String noMatches = compareProperties(cms, resourceName, storedResource, excludeList);
// now see if we have collected any no-matches
if (noMatches.length() > 0) {
fail("error comparing resource " + resourceName + " with stored values: " + noMatches);
}
// test if the property was already in the stored result
List<CmsProperty> storedProperties = storedResource.getProperties();
if (!storedProperties.contains(property)) {
fail("property not found in stored value: " + property);
}
// test if the property was really removed.
CmsProperty resourceProperty = cms.readPropertyObject(resourceName, property.getName(), false);
if (resourceProperty != CmsProperty.getNullProperty()) {
fail("property is not removed :" + property + " != " + resourceProperty);
}
} catch (Exception e) {
fail("cannot read resource " + resourceName + " " + e.getMessage());
}
}
/**
* Compares the current properties of a resource with the stored values and a list of deleted properties.<p>
*
* @param cms an initialized CmsObject
* @param resourceName the name of the resource to compare
* @param excludeList a list of CmsProperties to exclude
*/
public void assertPropertyRemoved(CmsObject cms, String resourceName, List excludeList) {
try {
// get the stored resource
OpenCmsTestResourceStorageEntry storedResource = m_currentResourceStrorage.get(resourceName);
String noMatches = compareProperties(cms, resourceName, storedResource, excludeList);
// now see if we have collected any no-matches
if (noMatches.length() > 0) {
fail("error comparing resource " + resourceName + " with stored values: " + noMatches);
}
// test that the properties were removed and that the properties
// were present in the stored result
String propertyNotDeleted = "";
String storedNotFound = "";
Iterator i = excludeList.iterator();
List<CmsProperty> storedProperties = storedResource.getProperties();
List<CmsProperty> resourceProperties = cms.readPropertyObjects(resourceName, false);
while (i.hasNext()) {
CmsProperty property = (CmsProperty)i.next();
// test if the property has the same value
if (resourceProperties.contains(property)) {
CmsProperty resourceProperty = cms.readPropertyObject(resourceName, property.getName(), false);
propertyNotDeleted += "[" + property + " != " + resourceProperty + "]";
}
// test if the property was already in the stored object
if (!storedProperties.contains(property)) {
storedNotFound += "[" + property + "]";
}
}
// now see if we have collected any property no-matches
if (propertyNotDeleted.length() > 0) {
fail("properties not deleted for " + resourceName + ": " + propertyNotDeleted);
}
// now see if we have collected any property not found in the stored original
if (storedNotFound.length() > 0) {
fail("properties not found in stored value: " + storedNotFound);
}
} catch (Exception e) {
fail("cannot read resource " + resourceName + " " + e.getMessage());
}
}
/**
* Asserts the equality of the two given relations.<p>
*
* @param expected the expected relation
* @param actual the actual result
*/
public void assertRelation(CmsRelation expected, CmsRelation actual) {
assertEquals(expected.getSourceId(), actual.getSourceId());
assertEquals(expected.getSourcePath(), actual.getSourcePath());
assertEquals(expected.getTargetId(), actual.getTargetId());
assertEquals(expected.getTargetPath(), actual.getTargetPath());
assertEquals(expected.getType(), actual.getType());
}
/**
* Compares the current resource id of a resource with a given id.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param resourceId the id
*/
public void assertResourceId(CmsObject cms, String resourceName, CmsUUID resourceId) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (!res.getResourceId().equals(resourceId)) {
fail("[ResourceId] " + resourceId + " != " + res.getResourceId() + "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Ensures that the given resource is of a certain type.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to check
* @param resourceType the resource type to check for
*/
public void assertResourceType(CmsObject cms, String resourceName, int resourceType) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (res.getTypeId() != resourceType) {
fail("[ResourceType " + res.getTypeId() + " != " + resourceType + "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Validates if the current sibling count of a resource matches the given number.<p>
*
* @param cms the current user's Cms object
* @param resourceName the name of the resource to compare
* @param count the expected sibling count
*/
public void assertSiblingCount(CmsObject cms, String resourceName, int count) {
try {
// get the current resource from the VFS
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (res.getSiblingCount() != count) {
fail("[SiblingCount " + res.getSiblingCount() + " != " + count + "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Validates if the current sibling count of a resource has been incremented
* compared to its previous sibling count.<p>
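*
* <p>Illustrative usage (sketch; paths are hypothetical): after creating exactly one new sibling,
* the stored count should be exceeded by one:</p>
* <pre>
* storeResources(cms, "/folder1/page1.html");
* // ... create one sibling of the resource ...
* assertSiblingCountIncremented(cms, "/folder1/page1.html", 1);
* </pre>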
*
* @param cms the current user's Cms object
* @param resourceName the name of the resource to compare
* @param increment the number of additional siblings compared to the original state
*/
public void assertSiblingCountIncremented(CmsObject cms, String resourceName, int increment) {
try {
// get the current resource from the VFS
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
// get the previous resource from resource storage
OpenCmsTestResourceStorageEntry entry = m_currentResourceStrorage.get(resourceName);
if (res.getSiblingCount() != (entry.getSiblingCount() + increment)) {
fail("[SiblingCount "
+ res.getSiblingCount()
+ " != "
+ entry.getSiblingCount()
+ "+"
+ increment
+ "]");
}
} catch (Exception e) {
fail("cannot read resource " + resourceName + " " + e.getMessage());
}
}
/**
* Compares the current state of a resource with a given state.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param state the state
*/
public void assertState(CmsObject cms, String resourceName, CmsResourceState state) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (res.getState() != state) {
fail("[State " + state + " != " + res.getState() + "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Compares the current structure id of a resource with a given id.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param structureId the id
*/
public void assertStructureId(CmsObject cms, String resourceName, CmsUUID structureId) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (!res.getStructureId().equals(structureId)) {
fail("[StructureId] " + structureId + " != " + res.getStructureId() + "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Compares the current type of a resource with a given type.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param type the type
*/
public void assertType(CmsObject cms, String resourceName, int type) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (res.getTypeId() != type) {
fail("[State " + type + " != " + res.getTypeId() + "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Compares the user who created a resource with a given user.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param user the expected creation user
*/
public void assertUserCreated(CmsObject cms, String resourceName, CmsUser user) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (!res.getUserCreated().equals(user.getId())) {
fail(createUserFailMessage(cms, "UserCreated", user.getId(), res.getUserLastModified()));
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Compares the current user last modified of a resource with a given user.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param user the last modification user
*/
public void assertUserLastModified(CmsObject cms, String resourceName, CmsUser user) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (!res.getUserLastModified().equals(user.getId())) {
fail(createUserFailMessage(cms, "UserLastModified", user.getId(), res.getUserLastModified()));
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Tests if the current version of a resource is equal to the given version number.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param version the version number to check
*/
public void assertVersion(CmsObject cms, String resourceName, int version) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
assertEquals("Version", version, res.getVersion());
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Creates a new storage object.<p>
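*
* <p>Illustrative usage (sketch; the storage name and path are arbitrary): create a named storage,
* switch to it and store resources for a later comparison:</p>
* <pre>
* createStorage("beforePublish");
* switchStorage("beforePublish");
* storeResources(cms, "/folder1/");
* </pre>
*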
* @param name the name of the storage
*/
public void createStorage(String name) {
OpenCmsTestResourceStorage storage = new OpenCmsTestResourceStorage(name);
m_resourceStorages.put(name, storage);
}
/**
* Should return the additional connection name.<p>
*
* @return the name of the additional connection
*/
public String getConnectionName() {
return "additional";
}
/**
* Returns the name of the database product.<p>
*
* @return either oracle or mysql
*/
public String getDatabaseProduct() {
return m_dbProduct;
}
/**
* Gets a precalculated resource state from the storage.<p>
*
* @param resourceName the name of the resource to get the state for
* @return the precalculated resource state
* @throws Exception in case something goes wrong
*/
public CmsResourceState getPreCalculatedState(String resourceName) throws Exception {
return m_currentResourceStrorage.getPreCalculatedState(resourceName);
}
/**
* Resets the mapping for resource names.<p>
*/
public void resetMapping() {
m_currentResourceStrorage.resetMapping();
}
/**
* Sets the mapping for resource names.<p>
*
* @param source the source resource name
* @param target the target resource name
*/
public void setMapping(String source, String target) {
m_currentResourceStrorage.setMapping(source, target);
}
/**
* Stores the state (e.g. attributes, properties, content, lock state and ACL) of
* a resource in the internal resource storage.<p>
*
* If the resourceName is the name of a folder in the vfs, all sub-resources are stored as well.
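*
* <p>Illustrative usage (sketch; the paths and the filter constant are hypothetical): store the
* state first, run the operation under test, then compare with an <code>assertFilter</code> call:</p>
* <pre>
* storeResources(cms, "/folder1/");
* // ... operation under test ...
* assertFilter(cms, "/folder1/page1.html", OpenCmsTestResourceFilter.FILTER_EQUAL);
* </pre>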
*
* @param cms an initialized CmsObject
* @param resourceName the name of the resource in the vfs
*/
public void storeResources(CmsObject cms, String resourceName) {
storeResources(cms, resourceName, true);
}
/**
* Stores the state (e.g. attributes, properties, content, lock state and ACL) of
* a resource in the internal resource storage.<p>
*
* If the resourceName is the name of a folder in the vfs and storeSubresources is true,
* all sub-resources are stored as well.
*
* @param cms an initialized CmsObject
* @param resourceName the name of the resource in the vfs
* @param storeSubresources if true, sub-resources of folders are stored as well
*/
public void storeResources(CmsObject cms, String resourceName, boolean storeSubresources) {
String resName = "";
try {
CmsResource resource = cms.readResource(resourceName, CmsResourceFilter.ALL);
// test if the name belongs to a file or folder
if (resource.isFile()) {
m_currentResourceStrorage.add(cms, resourceName, resource);
} else {
// this is a folder, so first add the folder itself to the storage
m_currentResourceStrorage.add(cms, resourceName
+ (resourceName.charAt(resourceName.length() - 1) != '/' ? "/" : ""), resource);
if (!storeSubresources) {
return;
}
// now get all subresources and add them as well
List<CmsResource> resources = cms.readResources(resourceName, CmsResourceFilter.ALL);
Iterator<CmsResource> i = resources.iterator();
while (i.hasNext()) {
CmsResource res = i.next();
resName = cms.getSitePath(res);
m_currentResourceStrorage.add(cms, resName, res);
}
}
} catch (CmsException e) {
fail("cannot read resource "
+ resourceName
+ " or "
+ resName
+ " "
+ CmsException.getStackTraceAsString(e));
}
}
/**
* Switches the internal resource storage.<p>
* @param name the name of the storage
* @throws CmsException if the storage was not found
*/
public void switchStorage(String name) throws CmsException {
OpenCmsTestResourceStorage storage = m_resourceStorages.get(name);
if (storage != null) {
m_currentResourceStrorage = storage;
} else {
throw new CmsException(Messages.get().container(Messages.ERR_RESOURCE_STORAGE_NOT_FOUND_0));
}
}
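/**
* Illustrative sketch (not part of the original test case): shows how the storage helpers
* above are typically combined in a test method. The storage name and the resource path are
* assumptions.
*
* @param cms an initialized CmsObject
*
* @throws Exception if anything goes wrong
*/
protected void exampleStorageWorkflow(CmsObject cms) throws Exception {
// take a named snapshot of a folder (including its subresources) before the operation under test
createStorage("beforeOperation");
switchStorage("beforeOperation");
storeResources(cms, "/folder1/");
// ... the operation under test would run here ...
// ... the assert methods of this class then compare the live state against the snapshot ...
// finally switch back to the default storage so that later tests are not affected
switchStorage(OpenCmsTestResourceStorage.DEFAULT_STORAGE);
}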
/**
* Deletes the given file from the rfs.<p>
*
* @param absolutePath the absolute path of the file
*/
protected void deleteFile(String absolutePath) {
try {
// sleep 0.5 seconds - sometimes deletion does not work if not waiting
Thread.sleep(500);
} catch (InterruptedException e) {
// ignore
}
File file = new File(absolutePath);
if (file.exists()) {
if (!file.delete()) {
file.deleteOnExit();
}
}
}
/**
* Writes a message to the current output stream.<p>
*
* @param message the message to write
*/
protected void echo(String message) {
try {
System.out.println();
m_shell.printPrompt();
System.out.println(message);
} catch (Throwable t) {
throw new RuntimeException(t);
}
}
/**
* Returns an initialized CmsObject with admin user permissions,
* running in the "/sites/default" site root.<p>
*
* @return an initialized CmsObject with admin user permissions
* @throws CmsException in case of OpenCms access errors
*/
protected CmsObject getCmsObject() throws CmsException {
// log in the Admin user
CmsObject cms = OpenCms.initCmsObject(OpenCms.getDefaultUsers().getUserGuest());
cms.loginUser("Admin", "admin");
// switch to the "Offline" project
cms.getRequestContext().setCurrentProject(cms.readProject("Offline"));
cms.getRequestContext().setSiteRoot("/sites/default/");
// init the storage
createStorage(OpenCmsTestResourceStorage.DEFAULT_STORAGE);
switchStorage(OpenCmsTestResourceStorage.DEFAULT_STORAGE);
// return the initialized cms context Object
return cms;
}
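/**
* Illustrative sketch (not part of the original test case): the minimal shape of a test
* method built on getCmsObject(). The resource path and the expected version are assumptions.
*
* @throws Exception if anything goes wrong
*/
protected void exampleReadTest() throws Exception {
echo("Testing a simple read with the default admin context");
CmsObject cms = getCmsObject();
// read a resource and compare its version counter against the expected value
CmsResource res = cms.readResource("/index.html", CmsResourceFilter.ALL);
assertEquals("Version", 1, res.getVersion());
}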
/**
* Imports a module (zipfile) from the default module directory,
* creating a temporary project for this.<p>
*
* @param importFile the name of the import module located in the default module directory
*
* @throws Exception if something goes wrong
*
* @see org.opencms.importexport.CmsImportExportManager#importData(CmsObject, I_CmsReport, CmsImportParameters)
*/
protected void importModuleFromDefault(String importFile) throws Exception {
String exportPath = OpenCms.getSystemInfo().getPackagesRfsPath();
String fileName = OpenCms.getSystemInfo().getAbsoluteRfsPathRelativeToWebInf(
exportPath + CmsSystemInfo.FOLDER_MODULES + importFile);
CmsImportParameters params = new CmsImportParameters(fileName, "/", true);
OpenCms.getImportExportManager().importData(
getCmsObject(),
new CmsShellReport(getCmsObject().getRequestContext().getLocale()),
params);
}
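/*
* Usage sketch (the module name is an assumption): a test would call
* importModuleFromDefault("org.opencms.test.examplemodule.zip") to import that zip from the
* "modules" folder below the configured package path before running its assertions.
*/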
/**
* Removes and deletes a storage object.<p>
* @param name the name of the storage
*/
protected void removeStorage(String name) {
OpenCmsTestResourceStorage storage = m_resourceStorages.get(name);
if (storage != null) {
m_resourceStorages.remove(name);
storage = null;
}
}
/**
* Restarts the cms.<p>
*/
protected void restart() {
OpenCmsTestLogAppender.setBreakOnError(false);
// output a message
System.out.println("\n\n\n----- Restarting shell -----");
m_shell.exit();
m_shell = new CmsShell(getTestDataPath("WEB-INF" + File.separator), null, null, "${user}@${project}>", null);
OpenCmsTestLogAppender.setBreakOnError(true);
}
/**
* Compares two vectors of access entries and creates a list of all access control entries which are
* not matching and are not included in a separate exclude list.
* @param cms the CmsObject
* @param resourceName the name of the resource the properties belong to
* @param storedResource the stored resource corresponding to the resourcename
* @param excludeList the list of access entries to exclude in the test or null
* @return string of non matching access entries
* @throws CmsException if something goes wrong
*/
private String compareAccessEntries(
CmsObject cms,
String resourceName,
OpenCmsTestResourceStorageEntry storedResource,
List excludeList) throws CmsException {
String noMatches = "";
List resAce = cms.getAccessControlEntries(resourceName);
List storedAce = storedResource.getAccessControlEntries();
List unmatchedAce;
unmatchedAce = compareAce(resAce, storedAce, excludeList);
if (unmatchedAce.size() > 0) {
noMatches += "[ACE missing " + unmatchedAce.toString() + "]\n";
}
unmatchedAce = compareAce(storedAce, resAce, excludeList);
if (unmatchedAce.size() > 0) {
noMatches += "[ACE missing " + unmatchedAce.toString() + "]\n";
}
return noMatches;
}
/**
* Compares two access lists and creates a list of permission sets which are
* not matching and are not included in a separate exclude list.
* @param cms the CmsObject
* @param resourceName the name of the resource the properties belong to
* @param storedResource the stored resource corresponding to the resourcename
* @param excludeList the list of permission sets to exclude in the test or null
* @return string of non matching access list entries
* @throws CmsException if something goes wrong
*/
private String compareAccessLists(
CmsObject cms,
String resourceName,
OpenCmsTestResourceStorageEntry storedResource,
List excludeList) throws CmsException {
String noMatches = "";
CmsAccessControlList resList = cms.getAccessControlList(resourceName);
CmsAccessControlList storedList = storedResource.getAccessControlList();
List unmatchedList;
unmatchedList = compareList(resList, storedList, excludeList);
if (unmatchedList.size() > 0) {
noMatches += "[ACL differences " + unmatchedList.toString() + "]\n";
}
unmatchedList = compareList(storedList, resList, excludeList);
if (unmatchedList.size() > 0) {
noMatches += "[ACL differences " + unmatchedList.toString() + "]\n";
}
return noMatches;
}
/**
* Compares two vectors of access control entries.<p>
*
* @param source the source vector to compare
* @param target the destination vector to compare
* @param exclude the exclude list
* @return list of non matching access control entries
*/
private List compareAce(List source, List target, List exclude) {
boolean isOverwriteAll = false;
Iterator itTargets = target.iterator();
while (itTargets.hasNext()) {
CmsAccessControlEntry ace = (CmsAccessControlEntry)itTargets.next();
if (ace.isOverwriteAll()) {
isOverwriteAll = true;
}
}
List result = new ArrayList();
Iterator i = source.iterator();
while (i.hasNext()) {
CmsAccessControlEntry ace = (CmsAccessControlEntry)i.next();
// here would be best to check the path of the overwrite all entry
// but since we have just the resource id, instead of the structure id
// we are not able to do that here :(
if (!target.contains(ace) && !isOverwriteAll) {
result.add(ace);
}
}
// finally match the result list with the exclude list
if (exclude != null) {
Iterator l = exclude.iterator();
while (l.hasNext()) {
CmsAccessControlEntry excludeAce = (CmsAccessControlEntry)l.next();
if (result.contains(excludeAce)) {
result.remove(excludeAce);
}
}
}
return result;
}
/**
* Compares two lists of permission sets.<p>
* @param source the source list to compare
* @param target the destination list to compare
* @param exclude the exclude list
* @return list of non matching permission sets
*/
private List compareList(CmsAccessControlList source, CmsAccessControlList target, List exclude) {
boolean isOverwriteAll = false;
Iterator itTargets = target.getPermissionMap().keySet().iterator();
while (itTargets.hasNext()) {
CmsUUID principalId = (CmsUUID)itTargets.next();
if (principalId.equals(CmsAccessControlEntry.PRINCIPAL_OVERWRITE_ALL_ID)) {
isOverwriteAll = true;
}
}
HashMap result = new HashMap();
Map destinationMap = target.getPermissionMap();
Map sourceMap = source.getPermissionMap();
Iterator i = sourceMap.entrySet().iterator();
while (i.hasNext()) {
Map.Entry entry = (Map.Entry)i.next();
CmsUUID key = (CmsUUID)entry.getKey();
CmsPermissionSet value = (CmsPermissionSet)entry.getValue();
if (destinationMap.containsKey(key)) {
CmsPermissionSet destValue = (CmsPermissionSet)destinationMap.get(key);
if (!destValue.equals(value)) {
result.put(key, key + " " + value + " != " + destValue);
}
} else if (!isOverwriteAll) {
// here would be best to check the path of the overwrite all entry
// but since we have just the resource id, instead of the structure id
// we are not able to do that here :(
result.put(key, "missing " + key);
}
}
// finally match the result list with the exclude list
if (exclude != null) {
Iterator l = exclude.iterator();
while (l.hasNext()) {
CmsUUID excludeUUID = (CmsUUID)l.next();
if (result.containsKey(excludeUUID)) {
result.remove(excludeUUID);
}
}
}
return new ArrayList(result.values());
}
/**
* Compares two lists of property definitions, excluding one given property definition.
* @param source the source list of property definitions
* @param target the target list of property definitions
* @param exclude the property definition to exclude
* @return String of missing property definitions
*/
private String comparePropertydefintions(List source, List target, CmsPropertyDefinition exclude) {
String noMatches = "";
Iterator i = source.iterator();
while (i.hasNext()) {
CmsPropertyDefinition prop = (CmsPropertyDefinition)i.next();
if ((!target.contains(prop)) && (!prop.getName().equals(exclude.getName()))) {
noMatches += "[" + prop + "]";
}
}
return noMatches;
}
/**
* Creates a user compare fail message.<p>
*
* @param cms the current OpenCms user context
* @param message the message to show
* @param user1 the id of the first (expected) user
* @param user2 the id of the second (found) user
* @return a user compare fail message
*
* @throws CmsException if one of the users can't be read
*/
private String createUserFailMessage(CmsObject cms, String message, CmsUUID user1, CmsUUID user2)
throws CmsException {
StringBuffer result = new StringBuffer();
result.append("[");
result.append(message);
result.append(" (");
result.append(cms.readUser(user1).getName());
result.append(") ");
result.append(user1);
result.append(" != (");
result.append(cms.readUser(user2).getName());
result.append(") ");
result.append(user2);
result.append("]");
return result.toString();
}
/**
* Creates a map of all parent resources of an OpenCms resource.<p>
* The resource UUID is used as key, the full resource path is used as the value.
*
* @param cms the CmsObject
* @param resourceName the name of the resource to get the parent map from
* @return HashMap of parent resources
*/
private Map<CmsUUID, String> getParents(CmsObject cms, String resourceName) {
HashMap<CmsUUID, String> parents = new HashMap<CmsUUID, String>();
List<CmsResource> parentResources = new ArrayList<CmsResource>();
try {
// get all parent folders of the current file
parentResources = cms.readPath(resourceName, CmsResourceFilter.IGNORE_EXPIRATION);
} catch (CmsException e) {
// ignore
}
Iterator<CmsResource> k = parentResources.iterator();
while (k.hasNext()) {
// add the current folder to the map
CmsResource curRes = k.next();
parents.put(curRes.getResourceId(), curRes.getRootPath());
}
return parents;
}
/**
* Initializes the OpenCms/database configuration
* by reading the appropriate values from opencms.properties.<p>
*/
private void initConfiguration() {
if (m_configuration == null) {
initTestDataPath();
m_configuration = OpenCmsTestProperties.getInstance().getConfiguration();
m_dbProduct = OpenCmsTestProperties.getInstance().getDbProduct();
int index = 0;
boolean cont;
do {
cont = false;
if (m_configuration.containsKey(OpenCmsTestProperties.PROP_TEST_DATA_PATH + "." + index)) {
addTestDataPath(m_configuration.get(OpenCmsTestProperties.PROP_TEST_DATA_PATH + "." + index));
cont = true;
index++;
}
} while (cont);
String propertyFile = "";
try {
propertyFile = getTestDataPath("WEB-INF/config." + m_dbProduct + "/opencms.properties");
m_configuration = new CmsParameterConfiguration(propertyFile);
} catch (Exception e) {
fail("Error while reading configuration from '" + propertyFile + "'\n" + e.toString());
return;
}
String key = "setup";
m_setupConnection = new ConnectionData();
m_setupConnection.m_dbName = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL + "." + key + "." + "dbName");
m_setupConnection.m_jdbcUrl = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL + "." + key + "." + "jdbcUrl");
m_setupConnection.m_userName = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL + "." + key + "." + "user");
m_setupConnection.m_userPassword = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ "password");
m_setupConnection.m_jdbcDriver = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ CmsDbPool.KEY_JDBC_DRIVER);
m_setupConnection.m_jdbcUrl = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ CmsDbPool.KEY_JDBC_URL);
m_setupConnection.m_jdbcUrlParams = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ CmsDbPool.KEY_JDBC_URL_PARAMS);
key = "default";
m_defaultConnection = new ConnectionData();
m_defaultConnection.m_dbName = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL + "." + key + "." + "dbName");
m_defaultConnection.m_userName = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ CmsDbPool.KEY_USERNAME);
m_defaultConnection.m_userPassword = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ CmsDbPool.KEY_PASSWORD);
m_defaultConnection.m_jdbcDriver = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ CmsDbPool.KEY_JDBC_DRIVER);
m_defaultConnection.m_jdbcUrl = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ CmsDbPool.KEY_JDBC_URL);
m_defaultConnection.m_jdbcUrlParams = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ CmsDbPool.KEY_JDBC_URL_PARAMS);
key = getConnectionName();
if (m_configuration.get(CmsDbPool.KEY_DATABASE_POOL + "." + key + "." + "dbName") != null) {
m_additionalConnection = new ConnectionData();
m_additionalConnection.m_dbName = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ "dbName");
m_additionalConnection.m_userName = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ CmsDbPool.KEY_USERNAME);
m_additionalConnection.m_userPassword = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ CmsDbPool.KEY_PASSWORD);
m_additionalConnection.m_jdbcDriver = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ CmsDbPool.KEY_JDBC_DRIVER);
m_additionalConnection.m_jdbcUrl = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ CmsDbPool.KEY_JDBC_URL);
m_additionalConnection.m_jdbcUrlParams = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ CmsDbPool.KEY_JDBC_URL_PARAMS);
}
m_defaultTablespace = m_configuration.get("db.oracle.defaultTablespace");
m_indexTablespace = m_configuration.get("db.oracle.indexTablespace");
m_tempTablespace = m_configuration.get("db.oracle.temporaryTablespace");
System.out.println("----- Starting tests on database "
+ m_dbProduct
+ " ("
+ m_setupConnection.m_jdbcUrl
+ ") "
+ "-----");
}
}
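/*
* For reference: the block above reads one set of connection data per pool key ("setup",
* "default" and the value of getConnectionName()) from opencms.properties. Each set consists
* of database name, user, password, JDBC driver, JDBC URL and optional URL parameters; the
* property names are built from the CmsDbPool constants. The Oracle tablespace names are read
* from the db.oracle.* keys.
*/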
}
| test/org/opencms/test/OpenCmsTestCase.java | /*
* This library is part of OpenCms -
* the Open Source Content Management System
*
* Copyright (c) Alkacon Software GmbH (http://www.alkacon.com)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* For further information about Alkacon Software GmbH, please see the
* company website: http://www.alkacon.com
*
* For further information about OpenCms, please see the
* project website: http://www.opencms.org
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.opencms.test;
import org.opencms.configuration.CmsParameterConfiguration;
import org.opencms.db.CmsDbPool;
import org.opencms.db.CmsResourceState;
import org.opencms.file.CmsFile;
import org.opencms.file.CmsGroup;
import org.opencms.file.CmsObject;
import org.opencms.file.CmsProject;
import org.opencms.file.CmsProperty;
import org.opencms.file.CmsPropertyDefinition;
import org.opencms.file.CmsResource;
import org.opencms.file.CmsResourceFilter;
import org.opencms.file.CmsUser;
import org.opencms.file.history.CmsHistoryFile;
import org.opencms.file.history.I_CmsHistoryResource;
import org.opencms.file.types.CmsResourceTypeBinary;
import org.opencms.file.types.CmsResourceTypeFolder;
import org.opencms.file.types.CmsResourceTypePlain;
import org.opencms.importexport.CmsImportParameters;
import org.opencms.lock.CmsLock;
import org.opencms.lock.CmsLockType;
import org.opencms.main.CmsException;
import org.opencms.main.CmsShell;
import org.opencms.main.CmsSystemInfo;
import org.opencms.main.OpenCms;
import org.opencms.publish.CmsPublishJobBase;
import org.opencms.publish.CmsPublishJobInfoBean;
import org.opencms.relations.CmsRelation;
import org.opencms.report.CmsShellReport;
import org.opencms.report.I_CmsReport;
import org.opencms.security.CmsAccessControlEntry;
import org.opencms.security.CmsAccessControlList;
import org.opencms.security.CmsPermissionSet;
import org.opencms.security.CmsPermissionSetCustom;
import org.opencms.security.I_CmsPrincipal;
import org.opencms.setup.CmsSetupDb;
import org.opencms.util.CmsDateUtil;
import org.opencms.util.CmsFileUtil;
import org.opencms.util.CmsUUID;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
import junit.extensions.TestSetup;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.apache.commons.io.filefilter.FileFilterUtils;
import org.dom4j.Document;
import org.dom4j.Node;
import org.dom4j.util.NodeComparator;
/**
* Extends the JUnit standard with methods to handle an OpenCms database
* test instance.<p>
*
* The required configuration files are located in the
* <code>${test.data.path}/WEB-INF</code> folder structure.<p>
*
* To run this test you might have to change the database connection
* values in the provided <code>${test.data.path}/WEB-INF/config/opencms.properties</code> file.<p>
*
* @since 6.0.0
*/
public class OpenCmsTestCase extends TestCase {
/** Class to bundle the connection information. */
protected static class ConnectionData {
/** The name of the database. */
public String m_dbName;
/** The database driver. */
public String m_jdbcDriver;
/** The database url. */
public String m_jdbcUrl;
/** Additional database parameters. */
public String m_jdbcUrlParams;
/** The name of the user. */
public String m_userName;
/** The password of the user. */
public String m_userPassword;
}
/**
* Extension of <code>NodeComparator</code> to store unequal nodes.<p>
*/
static class InternalNodeComparator extends NodeComparator implements Serializable {
/** UID required for safe serialization. */
private static final long serialVersionUID = 2742216550970181832L;
/** Unequal node1. */
public Node m_node1;
/** Unequal node2. */
public Node m_node2;
/**
* @see org.dom4j.util.NodeComparator#compare(org.dom4j.Node, org.dom4j.Node)
*/
@Override
public int compare(Node n1, Node n2) {
int result = super.compare(n1, n2);
if ((result != 0) && (m_node1 == null)) {
m_node1 = n1;
m_node2 = n2;
}
return result;
}
}
/** test article type id constant. */
public static final int ARTICLE_TYPEID = 27;
/** Special character constant. */
public static final String C_AUML_LOWER = "\u00e4";
/** Special character constant. */
public static final String C_AUML_UPPER = "\u00c4";
/** Special character constant. */
public static final String C_EURO = "\u20ac";
/** Special character constant. */
public static final String C_OUML_LOWER = "\u00f6";
/** Special character constant. */
public static final String C_OUML_UPPER = "\u00d6";
/** Special character constant. */
public static final String C_SHARP_S = "\u00df";
/** Special character constant. */
public static final String C_UUML_LOWER = "\u00fc";
/** Special character constant. */
public static final String C_UUML_UPPER = "\u00dc";
/** Key for tests on MySql database. */
public static final String DB_MYSQL = "mysql";
/** Key for tests on Oracle database. */
public static final String DB_ORACLE = "oracle";
/** The OpenCms/database configuration. */
public static CmsParameterConfiguration m_configuration;
/** Name of the default tablespace (oracle only). */
public static String m_defaultTablespace;
/** Name of the index tablespace (oracle only). */
public static String m_indexTablespace;
/** The internal storages. */
public static HashMap<String, OpenCmsTestResourceStorage> m_resourceStorages;
/** Name of the temporary tablespace (oracle only). */
public static String m_tempTablespace;
/** Additional connection data. */
protected static ConnectionData m_additionalConnection;
/** The user connection data. */
protected static ConnectionData m_defaultConnection;
/** The setup connection data. */
protected static ConnectionData m_setupConnection;
/** The cached list of OpenCms class names. */
private static List<String> classNameList;
/** The file date of the configuration files. */
private static long[] m_dateConfigFiles;
/** DB product used for the tests. */
private static String m_dbProduct = DB_MYSQL;
/** The path to the default setup data files. */
private static String m_setupDataPath;
/** The initialized OpenCms shell instance. */
private static CmsShell m_shell;
/** The list of paths to the additional test data files. */
private static List<String> m_testDataPath;
/** The current resource storage. */
public OpenCmsTestResourceStorage m_currentResourceStrorage;
/**
* Default JUnit constructor.<p>
*
* @param arg0 JUnit parameters
*/
public OpenCmsTestCase(String arg0) {
this(arg0, true);
}
/**
* JUnit constructor.<p>
* @param arg0 JUnit parameters
* @param initialize indicates if the configuration will be initialized
*/
public OpenCmsTestCase(String arg0, boolean initialize) {
super(arg0);
if (initialize) {
OpenCmsTestLogAppender.setBreakOnError(false);
if (m_resourceStorages == null) {
m_resourceStorages = new HashMap<String, OpenCmsTestResourceStorage>();
}
// initialize configuration
initConfiguration();
// set "OpenCmsLog" system property to enable the logger
OpenCmsTestLogAppender.setBreakOnError(true);
}
}
/**
* Generates a number of test files (binary and plain text) in the given folder.<p>
*
* @param cms the cms context
* @param vfsFolder name of the folder
* @param numberOfFiles the number of files to generate
* @param fileTypeDistribution a percentage: x% binary files and (1-x)% text files
*
* @return the number of files generated
*
* @throws Exception if something goes wrong
*/
public static int generateContent(CmsObject cms, String vfsFolder, int numberOfFiles, double fileTypeDistribution)
throws Exception {
int maxProps = 10;
double propertyDistribution = 0.0;
int writtenFiles = 0;
int numberOfBinaryFiles = (int)(numberOfFiles * fileTypeDistribution);
// generate binary files
writtenFiles += generateResources(
cms,
"org/opencms/search/pdf-test-112.pdf",
vfsFolder,
numberOfBinaryFiles,
CmsResourceTypeBinary.getStaticTypeId(),
maxProps,
propertyDistribution);
// generate text files
writtenFiles += generateResources(cms, "org/opencms/search/extractors/test1.html", vfsFolder, numberOfFiles
- numberOfBinaryFiles, CmsResourceTypePlain.getStaticTypeId(), maxProps, propertyDistribution);
System.out.println("" + writtenFiles + " files written in Folder " + vfsFolder);
return writtenFiles;
}
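/**
* Illustrative usage sketch (folder name and numbers are assumptions): fills an already
* existing VFS folder with 100 test files, roughly 70% binary PDF and 30% plain text.
*
* @param cms the cms context
*
* @throws Exception if something goes wrong
*/
protected static void exampleGenerateFlatContent(CmsObject cms) throws Exception {
int written = generateContent(cms, "/generated/", 100, 0.7);
System.out.println("generated " + written + " example files");
}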
/**
* Generates a sub tree of folders with files.<p>
*
* @param cms the cms context
* @param vfsFolder where to create the subtree
* @param maxWidth an upper bound for the number of subfolders a folder should have
* @param maxDepth an upper bound for the depth of the generated subtree
* @param maxProps upper bound for number of properties to create for each resource
* @param propertyDistribution a percentage: x% shared props and (1-x)% individual props
* @param maxNumberOfFiles upper bound for the number of files in each folder
* @param fileTypeDistribution a percentage: x% binary files and (1-x)% text files
*
* @return the number of files actually written
*
* @throws Exception if something goes wrong
*/
public static int generateContent(
CmsObject cms,
String vfsFolder,
int maxWidth,
int maxDepth,
int maxProps,
double propertyDistribution,
int maxNumberOfFiles,
double fileTypeDistribution) throws Exception {
int fileNameLength = 10;
int propValueLength = 10;
// end recursion
if (maxDepth < 1) {
return 0;
}
if (!vfsFolder.endsWith("/")) {
vfsFolder += "/";
}
int writtenFiles = 0;
Random rnd = new Random();
int width = rnd.nextInt(maxWidth) + 1;
int depth = maxDepth - rnd.nextInt(2);
for (int i = 0; i < width; i++) {
// generate folder
String vfsName = vfsFolder + generateName(fileNameLength) + i;
List<CmsProperty> props = generateProperties(cms, maxProps, propValueLength, propertyDistribution);
cms.createResource(vfsName, CmsResourceTypeFolder.getStaticTypeId(), new byte[0], props);
cms.unlockResource(vfsName);
int numberOfFiles = rnd.nextInt(maxNumberOfFiles) + 1;
// generate binary files
int numberOfBinaryFiles = (int)(numberOfFiles * fileTypeDistribution);
writtenFiles += generateResources(
cms,
"org/opencms/search/pdf-test-112.pdf",
vfsName,
numberOfBinaryFiles,
CmsResourceTypeBinary.getStaticTypeId(),
maxProps,
propertyDistribution);
// generate text files
writtenFiles += generateResources(cms, "org/opencms/search/extractors/test1.html", vfsName, numberOfFiles
- numberOfBinaryFiles, CmsResourceTypePlain.getStaticTypeId(), maxProps, propertyDistribution);
// in depth recursion
writtenFiles += generateContent(
cms,
vfsName,
maxWidth,
depth - 1,
maxProps,
propertyDistribution,
maxNumberOfFiles,
fileTypeDistribution);
System.out.println("" + writtenFiles + " files written in Folder " + vfsName);
}
return writtenFiles;
}
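/**
* Illustrative usage sketch for the recursive variant above (all numbers are assumptions):
* builds a random folder tree below an existing folder, at most 3 levels deep, with up to 4
* subfolders, up to 5 properties and up to 10 files (30% binary) per folder.
*
* @param cms the cms context
*
* @throws Exception if something goes wrong
*/
protected static void exampleGenerateTree(CmsObject cms) throws Exception {
generateContent(cms, "/generated/", 4, 3, 5, 0.5, 10, 0.3);
}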
/**
* Generates a new random name.<p>
*
* @param maxLen upper bound for the length of the name
*
* @return a random name
*/
public static String generateName(int maxLen) {
String name = "";
Random rnd = new Random();
int len = rnd.nextInt(maxLen) + 1;
for (int j = 0; j < len; j++) {
name += (char)(rnd.nextInt(26) + 97);
}
return name;
}
/**
* Generates random properties.<p>
*
* @param cms the cms context
* @param maxProps upper bound for number of properties to create for each resource
* @param propValueLength upper bound for the number of char for the values
* @param propertyDistribution a percentage: x% shared props and (1-x)% individual props
*
* @return a list of <code>{@link CmsProperty}</code> objects
*
* @throws CmsException if something goes wrong
*/
public static List<CmsProperty> generateProperties(
CmsObject cms,
int maxProps,
int propValueLength,
double propertyDistribution) throws CmsException {
List<CmsPropertyDefinition> propList = cms.readAllPropertyDefinitions();
List<CmsProperty> props = new ArrayList<CmsProperty>();
if (maxProps > propList.size()) {
maxProps = propList.size();
}
Random rnd = new Random();
int propN = rnd.nextInt(maxProps) + 1;
for (int j = 0; j < propN; j++) {
CmsPropertyDefinition propDef = propList.get((int)(Math.random() * propList.size()));
propList.remove(propDef);
if (Math.random() < propertyDistribution) {
// only resource prop
props.add(new CmsProperty(propDef.getName(), null, generateName(propValueLength)));
} else {
// resource and structure props
props.add(new CmsProperty(
propDef.getName(),
generateName(propValueLength),
generateName(propValueLength)));
}
}
return props;
}
/**
* Generates n new resources in a given folder.<p>
*
* @param cms the cms context
* @param rfsName the rfs file for the content
* @param vfsFolder the folder to create the resources in
* @param n number of resources to generate
* @param type the type of the resource
* @param maxProps upper bound for number of properties to create for each resource
* @param propertyDistribution a percentage: x% shared props and (1-x)% individual props
*
* @return the number of files actually written
*
* @throws Exception if something goes wrong
*/
public static int generateResources(
CmsObject cms,
String rfsName,
String vfsFolder,
int n,
int type,
int maxProps,
double propertyDistribution) throws Exception {
int fileNameLength = 10;
int propValueLength = 10;
if (!vfsFolder.endsWith("/")) {
vfsFolder += "/";
}
int writtenFiles = 0;
System.out.println("Importing Files");
for (int i = 0; i < n; i++) {
String vfsName = vfsFolder + generateName(fileNameLength) + i;
if (rfsName.lastIndexOf('.') > 0) {
vfsName += rfsName.substring(rfsName.lastIndexOf('.'));
}
List<CmsProperty> props = generateProperties(cms, maxProps, propValueLength, propertyDistribution);
try {
OpenCmsTestCase.importTestResource(cms, rfsName, vfsName, type, props);
writtenFiles++;
} catch (Exception e) {
System.out.println("error! " + e.getMessage());
}
}
return writtenFiles;
}
/**
* Generates a wrapper for a test class which handles setting up the OpenCms instance.<p>
*
* @param testClass the test class to wrap
* @param importFolder the RFS folder with the test data to import
* @param targetFolder the VFS target folder for the test data
*
* @return the wrapped test
*/
public static Test generateSetupTestWrapper(
Class<? extends Test> testClass,
final String importFolder,
final String targetFolder) {
try {
TestSuite suite = new TestSuite();
suite.setName(testClass.getName());
Constructor<? extends Test> constructor = testClass.getConstructor(String.class);
for (Method method : testClass.getMethods()) {
String methodName = method.getName();
if (methodName.startsWith("test") && (method.getParameterTypes().length == 0)) {
Test test = constructor.newInstance(method.getName());
suite.addTest(test);
}
}
TestSetup wrapper = new TestSetup(suite) {
/**
* @see junit.extensions.TestSetup#setUp()
*/
@Override
protected void setUp() {
setupOpenCms(importFolder, targetFolder);
}
/**
* @see junit.extensions.TestSetup#tearDown()
*/
@Override
protected void tearDown() {
removeOpenCms();
}
};
return wrapper;
} catch (Throwable e) {
throw new RuntimeException(e);
}
}
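/**
* Illustrative sketch of a suite() method in a concrete test class (the class literal, import
* folder and target folder are assumptions): every test* method of the wrapped class runs
* against a freshly set up OpenCms instance.
*
* @return the wrapped test suite
*/
public static Test exampleSuite() {
return generateSetupTestWrapper(OpenCmsTestCase.class, "simpletest", "/");
}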
/**
* Generates n new users for a given group.<p>
*
* @param cms the cms context
* @param groupName the group name; the group will be created if it does not exist
* @param n number of users to generate
*
* @throws CmsException if something goes wrong
*/
public static void generateUsers(CmsObject cms, String groupName, int n) throws CmsException {
CmsGroup group = null;
try {
group = cms.readGroup(groupName);
} catch (Exception e) {
// ignore
}
if (group == null) {
cms.createGroup(groupName, groupName, 0, null);
}
for (int i = 0; i < n; i++) {
String name = generateName(10) + i;
cms.createUser(name, "pwd" + i, "test user " + i, null);
cms.addUserToGroup(name, groupName);
}
}
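/**
* Illustrative usage sketch (group name and count are assumptions): creates the group on
* demand and adds 25 users with random names to it.
*
* @param cms the cms context
*
* @throws CmsException if something goes wrong
*/
protected static void exampleGenerateUsers(CmsObject cms) throws CmsException {
generateUsers(cms, "exampleGroup", 25);
}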
/**
* Gets the list of all names of classes which exist as class files in a directory in the classpath (not in JARs) and whose path contains 'opencms' or 'alkacon'.
*
* @return the list of all opencms class on the class path
*
* @throws Exception if something goes wrong
*/
public static List<String> getClassNames() throws Exception {
if (classNameList != null) {
return classNameList;
}
FileFilter filter = new FileFilter() {
public boolean accept(File pathname) {
return pathname.isFile() && pathname.getName().endsWith(".class");
}
};
String[] classpaths = System.getProperty("java.class.path", "").split(File.pathSeparator);
List<String> classNames = new ArrayList<String>();
for (String path : classpaths) {
File baseFile = new File(path);
String basePath = baseFile.getPath();
List<File> classFiles = CmsFileUtil.getFiles(path, filter, true);
for (File classFile : classFiles) {
String relativePath = classFile.getPath().substring(basePath.length());
String className = relativePath.replace("" + File.separatorChar, ".").substring(1).replaceFirst(
"\\.class$",
"");
if ((className.indexOf("opencms") > -1) || (className.indexOf("alkacon") > -1)) {
classNames.add(className);
}
}
}
classNameList = classNames;
return classNames;
}
/**
* Returns the currently used database/configuration.<p>
*
* @return the currently used database/configuration
*/
public static String getDbProduct() {
return m_dbProduct;
}
/**
* Does a database import from the given RFS folder to the given VFS folder.<p>
*
* @param importFolder the RFS folder to import from
* @param targetFolder the VFS folder to import into
*/
public static void importData(String importFolder, String targetFolder) {
// turn off exceptions after error logging during setup (won't work otherwise)
OpenCmsTestLogAppender.setBreakOnError(false);
// output a message
System.out.println("\n\n\n----- Starting test case: Importing OpenCms VFS data -----");
// kill any old shell that might have remained from a previous test
if (m_shell != null) {
try {
m_shell.exit();
m_shell = null;
} catch (Throwable t) {
// ignore
}
}
// create a shell instance
m_shell = new CmsShell(getTestDataPath("WEB-INF" + File.separator), null, null, "${user}@${project}>", null);
// open the test script
File script;
FileInputStream stream = null;
CmsObject cms = null;
try {
// start the shell with the base script
script = new File(getTestDataPath("scripts/script_import.txt"));
stream = new FileInputStream(script);
m_shell.start(stream);
// log in the Admin user and switch to the setup project
cms = OpenCms.initCmsObject(OpenCms.getDefaultUsers().getUserGuest());
cms.loginUser("Admin", "admin");
cms.getRequestContext().setCurrentProject(cms.readProject("tempFileProject"));
if (importFolder != null) {
// import the "simpletest" files
importResources(cms, importFolder, targetFolder);
}
// publish the current project by script
script = new File(getTestDataPath("scripts/script_import_publish.txt"));
stream = new FileInputStream(script);
m_shell.start(stream);
OpenCms.getPublishManager().waitWhileRunning();
// switch to the "Offline" project
cms.getRequestContext().setCurrentProject(cms.readProject("Offline"));
cms.getRequestContext().setSiteRoot("/sites/default/");
// output a message
System.out.println("----- Starting test cases -----");
} catch (Throwable t) {
t.printStackTrace(System.err);
fail("Unable to setup OpenCms\n" + CmsException.getStackTraceAsString(t));
}
// turn on exceptions after error logging
OpenCmsTestLogAppender.setBreakOnError(true);
}
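/*
* Usage sketch (folder names are assumptions): importData("simpletest", "/") re-imports the
* test content into an already existing database, in contrast to setupOpenCms() below, which
* also copies the configuration files and recreates the database first.
*/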
/**
* Initializes the path to the test data configuration files
* using the default path.<p>
*/
public static synchronized void initTestDataPath() {
if (m_testDataPath == null) {
m_testDataPath = new ArrayList<String>(4);
// test whether we are instantiated within the
// AllTests suite and therefore the OpenCmsTestProperties are
// already set up:
try {
OpenCmsTestProperties.getInstance();
} catch (RuntimeException rte) {
OpenCmsTestProperties.initialize(org.opencms.test.AllTests.TEST_PROPERTIES_PATH);
}
// set data path
addTestDataPath(OpenCmsTestProperties.getInstance().getTestDataPath());
}
}
/**
* Removes the initialized OpenCms database and all
* temporary files created during the test run.<p>
*/
public static void removeOpenCms() {
// ensure logging does not throw exceptions
OpenCmsTestLogAppender.setBreakOnError(false);
// output a message
m_shell.printPrompt();
System.out.println("----- Test cases finished -----");
// exit the shell
m_shell.exit();
try {
// sleep 0.5 seconds - sometimes other Threads need to finish before the next test case can start
Thread.sleep(500);
} catch (InterruptedException e) {
// ignore
}
// remove the database
removeDatabase();
String path;
// copy the configuration files to re-create the original configuration
String configFolder = getTestDataPath("WEB-INF" + File.separator + "config." + m_dbProduct + File.separator);
copyConfiguration(configFolder);
// remove potentially created "classes", "lib", "backup" etc. folders
path = getTestDataPath("WEB-INF/classes/");
if (path != null) {
CmsFileUtil.purgeDirectory(new File(path));
}
path = getTestDataPath("WEB-INF/logs/publish");
if (path != null) {
CmsFileUtil.purgeDirectory(new File(path));
}
path = getTestDataPath("WEB-INF/lib/");
if (path != null) {
CmsFileUtil.purgeDirectory(new File(path));
}
path = getTestDataPath("WEB-INF/" + CmsSystemInfo.FOLDER_CONFIG_DEFAULT + "backup/");
if (path != null) {
CmsFileUtil.purgeDirectory(new File(path));
}
path = getTestDataPath("WEB-INF/index/");
if ((path != null) && !m_configuration.containsKey("test.keep.searchIndex")) {
CmsFileUtil.purgeDirectory(new File(path));
}
path = getTestDataPath("export/");
if (path != null) {
CmsFileUtil.purgeDirectory(new File(path));
}
}
/**
* Restarts the OpenCms shell.<p>
*/
public static void restartOpenCms() {
// turn off exceptions after error logging during setup (won't work otherwise)
OpenCmsTestLogAppender.setBreakOnError(false);
// output a message
System.out.println("\n\n\n----- Restarting OpenCms -----");
// kill any old shell that might have remained from a previous test
if (m_shell != null) {
try {
m_shell.exit();
m_shell = null;
} catch (Throwable t) {
// ignore
}
}
// create a shell instance
m_shell = new CmsShell(getTestDataPath("WEB-INF" + File.separator), null, null, "${user}@${project}>", null);
// turn on exceptions after error logging
OpenCmsTestLogAppender.setBreakOnError(true);
}
/**
* Sets up a complete OpenCms instance with configuration from the config-ori folder,
* creating the usual projects, and importing a default database.<p>
*
* @param importFolder the folder to import in the "real" FS
* @param targetFolder the target folder of the import in the VFS
* @return an initialized OpenCms context with "Admin" user in the "Offline" project with the site root set to "/"
*/
public static CmsObject setupOpenCms(String importFolder, String targetFolder) {
return setupOpenCms(importFolder, targetFolder, getTestDataPath("WEB-INF/config." + m_dbProduct + "/"), true);
}
/**
* Sets up a complete OpenCms instance with configuration from the config-ori folder,
* creating the usual projects, and importing a default database.<p>
*
* @param importFolder the folder to import in the "real" FS
* @param targetFolder the target folder of the import in the VFS
* @param publish flag to signal if the publish script should be called
* @return an initialized OpenCms context with "Admin" user in the "Offline" project with the site root set to "/"
*/
public static CmsObject setupOpenCms(String importFolder, String targetFolder, boolean publish) {
return setupOpenCms(importFolder, targetFolder, getTestDataPath("WEB-INF/config." + m_dbProduct + "/"), publish);
}
/**
* Sets up a complete OpenCms instance with configuration from the config-ori folder,
* creating the usual projects, and importing a default database.<p>
*
* @param importFolder the folder to import in the "real" FS
* @param targetFolder the target folder of the import in the VFS
* @param specialConfigFolder the folder that contains the special configuration files for this setup
* @return an initialized OpenCms context with "Admin" user in the "Offline" project with the site root set to "/"
*/
public static CmsObject setupOpenCms(String importFolder, String targetFolder, String specialConfigFolder) {
return setupOpenCms(
importFolder,
targetFolder,
getTestDataPath("WEB-INF/config." + m_dbProduct + "/"),
getTestDataPath(specialConfigFolder),
true);
}
/**
* Sets up a complete OpenCms instance, creating the usual projects,
* and importing a default database.<p>
*
* @param importFolder the folder to import in the "real" FS
* @param targetFolder the target folder of the import in the VFS
* @param configFolder the folder to copy the configuration files from
* @param publish publish only if set
*
* @return an initialized OpenCms context with "Admin" user in the "Offline" project with the site root set to "/"
*/
public static CmsObject setupOpenCms(String importFolder, String targetFolder, String configFolder, boolean publish) {
return setupOpenCms(importFolder, targetFolder, configFolder, null, publish);
}
/**
* Sets up a complete OpenCms instance, creating the usual projects,
* and importing a default database.<p>
*
* @param importFolder the folder to import in the "real" FS
* @param targetFolder the target folder of the import in the VFS
* @param configFolder the folder to copy the standard configuration files from
* @param specialConfigFolder the folder that contains the special configuration files for this setup
* @param publish publish only if set
*
* @return an initialized OpenCms context with "Admin" user in the "Offline" project with the site root set to "/"
*/
public static CmsObject setupOpenCms(
String importFolder,
String targetFolder,
String configFolder,
String specialConfigFolder,
boolean publish) {
// initialize a new resource storage
m_resourceStorages = new HashMap<String, OpenCmsTestResourceStorage>();
// turn off exceptions after error logging during setup (won't work otherwise)
OpenCmsTestLogAppender.setBreakOnError(false);
// output a message
System.out.println("\n\n\n----- Starting test case: Importing OpenCms VFS data -----");
// kill any old shell that might have remained from a previous test
if (m_shell != null) {
try {
m_shell.exit();
m_shell = null;
} catch (Throwable t) {
// ignore
}
}
// create the OpenCms "config" folder
File configFile = new File(m_testDataPath.get(0)
+ "WEB-INF"
+ File.separator
+ CmsSystemInfo.FOLDER_CONFIG_DEFAULT);
if (!configFile.exists()) {
configFile.mkdir();
}
// copy the configuration files from the base folder
copyConfiguration(getTestDataPath("WEB-INF/base/"));
// copy the special configuration files from the database folder
copyConfiguration(configFolder);
// copy the configuration files from the special individual folder if required
if (specialConfigFolder != null) {
copyConfiguration(specialConfigFolder);
}
// create a new database first
setupDatabase();
// create a shell instance
m_shell = new CmsShell(getTestDataPath("WEB-INF" + File.separator), null, null, "${user}@${project}>", null);
// open the test script
File script;
FileInputStream stream = null;
CmsObject cms = null;
try {
// start the shell with the base script
script = new File(getTestDataPath("scripts/script_base.txt"));
stream = new FileInputStream(script);
m_shell.start(stream);
// add the default folders by script
script = new File(getTestDataPath("scripts/script_default_folders.txt"));
stream = new FileInputStream(script);
m_shell.start(stream);
// log in the Admin user and switch to the setup project
cms = OpenCms.initCmsObject(OpenCms.getDefaultUsers().getUserGuest());
cms.loginUser("Admin", "admin");
cms.getRequestContext().setCurrentProject(cms.readProject("_setupProject"));
if (importFolder != null) {
// import the "simpletest" files
importResources(cms, importFolder, targetFolder);
}
// create the default projects by script
script = new File(getTestDataPath("scripts/script_default_projects.txt"));
stream = new FileInputStream(script);
m_shell.start(stream);
if (publish) {
// publish the current project by script
script = new File(getTestDataPath("scripts/script_publish.txt"));
stream = new FileInputStream(script);
m_shell.start(stream);
OpenCms.getPublishManager().waitWhileRunning();
} else {
cms.unlockProject(cms.readProject("_setupProject").getUuid());
}
// switch to the "Offline" project
cms.getRequestContext().setCurrentProject(cms.readProject("Offline"));
cms.getRequestContext().setSiteRoot("/sites/default/");
// output a message
System.out.println("----- Starting test cases -----");
} catch (Throwable t) {
t.printStackTrace(System.err);
fail("Unable to setup OpenCms\n" + CmsException.getStackTraceAsString(t));
}
// turn on exceptions after error logging
OpenCmsTestLogAppender.setBreakOnError(true);
// return the initialized cms context Object
return cms;
}
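/*
* Typical lifecycle sketch (folder names are assumptions): a TestSetup wrapper calls
* setupOpenCms("simpletest", "/") once before the suite runs and removeOpenCms() once after
* it has finished; generateSetupTestWrapper() above produces exactly this wiring.
*/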
/**
* Adds an additional path to the list of test data configuration files.<p>
*
* @param dataPath the path to add
*/
protected static synchronized void addTestDataPath(String dataPath) {
// check if the db data folder is available
File testDataFolder = new File(dataPath);
if (!testDataFolder.exists()) {
fail("DB setup data not available at " + testDataFolder.getAbsolutePath());
}
String path = CmsFileUtil.normalizePath(testDataFolder.getAbsolutePath() + File.separator);
if (!m_testDataPath.contains(path)) {
m_testDataPath.add(path);
}
}
/**
* Check the setup DB for errors that might have occurred.<p>
*
* @param setupDb the setup DB object to check
*/
protected static void checkErrors(CmsSetupDb setupDb) {
if (!setupDb.noErrors()) {
List<String> errors = setupDb.getErrors();
for (Iterator<String> i = errors.iterator(); i.hasNext();) {
String error = i.next();
System.out.println(error);
}
fail(setupDb.getErrors().get(0));
}
}
/**
* Returns an initialized replacer map.<p>
*
* @param connectionData the connection data to derive the replacer information
*
* @return an initialized replacer map
*/
protected static Map<String, String> getReplacer(ConnectionData connectionData) {
Map<String, String> replacer = new HashMap<String, String>();
replacer.put("${database}", connectionData.m_dbName);
replacer.put("${user}", connectionData.m_userName);
replacer.put("${password}", connectionData.m_userPassword);
replacer.put("${defaultTablespace}", m_defaultTablespace);
replacer.put("${indexTablespace}", m_indexTablespace);
replacer.put("${temporaryTablespace}", m_tempTablespace);
return replacer;
}
/**
* Returns the path to the data files used by the setup wizard.<p>
*
* Whenever possible use this path to ensure that the files
* used for testing are actually the same as for the setup.<p>
*
* @return the path to the data files used by the setup wizard
*/
protected static synchronized String getSetupDataPath() {
if (m_setupDataPath == null) {
// check if the db setup files are available
File setupDataFolder = new File(OpenCmsTestProperties.getInstance().getTestWebappPath());
if (!setupDataFolder.exists()) {
fail("DB setup data not available at " + setupDataFolder.getAbsolutePath());
}
m_setupDataPath = setupDataFolder.getAbsolutePath() + File.separator;
}
// return the path name
return m_setupDataPath;
}
/**
* Returns an initialized DB setup object.<p>
*
* @param connection the connection data
*
* @return the initialized setup DB object
*/
protected static CmsSetupDb getSetupDb(ConnectionData connection) {
// create setup DB instance
CmsSetupDb setupDb = new CmsSetupDb(getSetupDataPath());
// connect to the DB
setupDb.setConnection(
connection.m_jdbcDriver,
connection.m_jdbcUrl,
connection.m_jdbcUrlParams,
connection.m_userName,
connection.m_userPassword);
// check for errors
if (!DB_ORACLE.equals(m_dbProduct)) {
checkErrors(setupDb);
}
return setupDb;
}
/**
* Returns the path to a file in the test data configuration,
* or <code>null</code> if the given file can not be found.<p>
*
* This method searches the given file in all configured test data paths.
* It returns the file found first.<p>
*
* @param filename the file name to look up
* @return the path to a file in the test data configuration
*/
protected static String getTestDataPath(String filename) {
for (int i = 0; i < m_testDataPath.size(); i++) {
String path = m_testDataPath.get(i);
File file = new File(path + filename);
if (file.exists()) {
if (file.isDirectory()) {
return CmsFileUtil.normalizePath(file.getAbsolutePath() + File.separator);
} else {
return CmsFileUtil.normalizePath(file.getAbsolutePath());
}
}
}
return null;
}
/**
* Imports a resource into the Cms.<p>
*
* @param cms an initialized CmsObject
* @param importFile the name (absolute Path) of the import resource (zip or folder)
* @param targetPath the name (absolute Path) of the target folder in the VFS
* @throws CmsException if something goes wrong
*/
protected static void importResources(CmsObject cms, String importFile, String targetPath) throws CmsException {
OpenCms.getImportExportManager().importData(
cms,
new CmsShellReport(cms.getRequestContext().getLocale()),
new CmsImportParameters(
getTestDataPath(File.separator + "imports" + File.separator + importFile),
targetPath,
true));
}
/**
* Imports a resource from the RFS test directories to the VFS.<p>
*
* The imported resource will be automatically unlocked.<p>
*
* @param cms the current users OpenCms context
* @param rfsPath the RFS path of the resource to import, must be a path accessible by the current class loader
* @param vfsPath the VFS path for the imported resource
* @param type the type for the imported resource
* @param properties the properties for the imported resource
* @return the imported resource
*
* @throws Exception if the import fails
*/
protected static CmsResource importTestResource(
CmsObject cms,
String rfsPath,
String vfsPath,
int type,
List<CmsProperty> properties) throws Exception {
byte[] content = CmsFileUtil.readFile(rfsPath);
CmsResource result = cms.createResource(vfsPath, type, content, properties);
cms.unlockResource(vfsPath);
return result;
}
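/**
* Illustrative usage sketch (VFS path, title and property choice are assumptions): imports a
* plain text test file from the classpath and attaches a single Title property to it.
*
* @param cms the current users OpenCms context
*
* @return the imported resource
*
* @throws Exception if the import fails
*/
protected static CmsResource exampleImportTestResource(CmsObject cms) throws Exception {
List<CmsProperty> props = new ArrayList<CmsProperty>();
props.add(new CmsProperty(CmsPropertyDefinition.PROPERTY_TITLE, "Example", null));
return importTestResource(
cms,
"org/opencms/search/extractors/test1.html",
"/example.html",
CmsResourceTypePlain.getStaticTypeId(),
props);
}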
/**
* Removes the OpenCms database test instance.<p>
*/
protected static void removeDatabase() {
if (m_defaultConnection != null) {
removeDatabase(m_setupConnection, m_defaultConnection, false);
}
if (m_additionalConnection != null) {
removeDatabase(m_setupConnection, m_additionalConnection, false);
}
}
/**
* Removes the OpenCms database test instance.<p>
*
* @param setupConnection the setup connection
* @param defaultConnection the default connection
* @param handleErrors flag to indicate if errors should be handled/checked
*/
protected static void removeDatabase(
ConnectionData setupConnection,
ConnectionData defaultConnection,
boolean handleErrors) {
CmsSetupDb setupDb = null;
boolean noErrors = true;
try {
setupDb = getSetupDb(defaultConnection);
setupDb.dropTables(m_dbProduct, getReplacer(defaultConnection), handleErrors);
noErrors = setupDb.noErrors();
} catch (Exception e) {
noErrors = false;
} finally {
if (setupDb != null) {
setupDb.closeConnection();
}
}
if (!handleErrors || noErrors) {
try {
setupDb = getSetupDb(setupConnection);
setupDb.dropDatabase(m_dbProduct, getReplacer(defaultConnection), handleErrors);
setupDb.closeConnection();
} catch (Exception e) {
noErrors = false;
} finally {
if (setupDb != null) {
setupDb.closeConnection();
}
}
}
if (handleErrors) {
checkErrors(setupDb);
}
}
/**
* Creates a new OpenCms test database including the tables.<p>
*
* Any existing instance of the test database is forcefully removed first.<p>
*/
protected static void setupDatabase() {
if (m_defaultConnection != null) {
setupDatabase(m_setupConnection, m_defaultConnection, true);
}
if (m_additionalConnection != null) {
setupDatabase(m_setupConnection, m_additionalConnection, true);
}
}
/**
* Creates a new OpenCms test database including the tables.<p>
*
* @param setupConnection the setup connection
* @param defaultConnection the default connection
* @param handleErrors flag to indicate if errors should be handled/checked
*/
protected static void setupDatabase(
ConnectionData setupConnection,
ConnectionData defaultConnection,
boolean handleErrors) {
CmsSetupDb setupDb = null;
boolean noErrors = true;
try {
setupDb = getSetupDb(setupConnection);
setupDb.createDatabase(m_dbProduct, getReplacer(defaultConnection), handleErrors);
noErrors = setupDb.noErrors();
setupDb.closeConnection();
} catch (Exception e) {
noErrors = false;
} finally {
if (setupDb != null) {
setupDb.closeConnection();
}
}
if (!handleErrors || noErrors) {
try {
setupDb = getSetupDb(defaultConnection);
setupDb.createTables(m_dbProduct, getReplacer(defaultConnection), handleErrors);
noErrors = setupDb.noErrors();
setupDb.closeConnection();
} catch (Exception e) {
noErrors = false;
} finally {
if (setupDb != null) {
setupDb.closeConnection();
}
}
}
if (noErrors) {
return;
} else if (handleErrors) {
removeDatabase(setupConnection, defaultConnection, false);
setupDatabase(setupConnection, defaultConnection, false);
} else {
checkErrors(setupDb);
}
}
/**
* Compares two lists of CmsProperty objects and creates a list of all properties which are
* not included in a separate exclude list.
* @param cms the CmsObject
* @param resourceName the name of the resource the properties belong to
* @param storedResource the stored resource corresponding to the resourcename
* @param excludeList the list of properties to exclude in the test or null
* @return string of non matching properties
* @throws CmsException if something goes wrong
*/
private static String compareProperties(
CmsObject cms,
String resourceName,
OpenCmsTestResourceStorageEntry storedResource,
List<CmsProperty> excludeList) throws CmsException {
String noMatches = "";
List<CmsProperty> storedProperties = storedResource.getProperties();
List<CmsProperty> properties = cms.readPropertyObjects(resourceName, false);
List<CmsProperty> unmatchedProperties;
unmatchedProperties = OpenCmsTestResourceFilter.compareProperties(storedProperties, properties, excludeList);
if (unmatchedProperties.size() > 0) {
noMatches += "[Properies missing " + unmatchedProperties.toString() + "]\n";
}
unmatchedProperties = OpenCmsTestResourceFilter.compareProperties(properties, storedProperties, excludeList);
if (unmatchedProperties.size() > 0) {
noMatches += "[Properies additional " + unmatchedProperties.toString() + "]\n";
}
return noMatches;
}
/**
* Copies the configuration files from the given folder to the "config" folder.
*
* @param newConfig the folder with the configuration files to copy
*/
private static void copyConfiguration(String newConfig) {
File configDir = new File(getTestDataPath("WEB-INF" + File.separatorChar + CmsSystemInfo.FOLDER_CONFIG_DEFAULT));
File configOriDir = new File(newConfig);
FileFilter filter = FileFilterUtils.orFileFilter(
FileFilterUtils.suffixFileFilter(".xml"),
FileFilterUtils.suffixFileFilter(".properties"));
if (configOriDir.exists()) {
File[] oriFiles = configOriDir.listFiles(filter);
boolean initConfigDates = false;
if (m_dateConfigFiles == null) {
m_dateConfigFiles = new long[oriFiles.length];
initConfigDates = true;
}
for (int i = 0; i < oriFiles.length; i++) {
File source = oriFiles[i];
if (source.isFile()) {
// only copy files
String sourceName = source.getAbsolutePath();
File target = new File(configDir, source.getName());
if (initConfigDates) {
m_dateConfigFiles[i] = target.lastModified();
}
String targetName = target.getAbsolutePath();
try {
CmsFileUtil.copy(sourceName, targetName);
target.setLastModified(m_dateConfigFiles[i]);
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}
/**
* Compares an access control entry of a resource with a given access control entry.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param ace the access control entry to compare or null if to compare with the stored values
*/
public void assertAce(CmsObject cms, String resourceName, CmsAccessControlEntry ace) {
try {
// create the exclude list
List<CmsAccessControlEntry> excludeList = new ArrayList<CmsAccessControlEntry>();
if (ace != null) {
excludeList.add(ace);
}
// get the stored resource
OpenCmsTestResourceStorageEntry storedResource = m_currentResourceStrorage.get(resourceName);
String noMatches = compareAccessEntries(cms, resourceName, storedResource, excludeList);
// now see if we have collected any no-matches
if (noMatches.length() > 0) {
fail("error comparing ace of resource " + resourceName + " with stored values: " + noMatches);
}
if (ace != null) {
List<CmsAccessControlEntry> resAces = cms.getAccessControlEntries(resourceName);
boolean notFound = true;
Iterator<CmsAccessControlEntry> i = resAces.iterator();
while (i.hasNext()) {
CmsAccessControlEntry resAce = i.next();
if (resAce.getPrincipal().equals(ace.getPrincipal())
&& (resAce.getResource().equals(ace.getResource()))) {
notFound = false;
if (!resAce.equals(ace)) {
fail("[ACE " + ace + " != " + resAce + "]");
}
}
}
if (notFound) {
fail("[ACE not found" + ace + "]");
}
}
} catch (Exception e) {
e.printStackTrace();
fail("cannot read resource " + resourceName + " " + e.getMessage());
}
}
/**
* Compares an access control list of a resource with a given access control permission.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param principal the principal of the permission set or null if to compare with the stored values
* @param permission the permission set to compare
*/
public void assertAcl(CmsObject cms, String resourceName, CmsUUID principal, CmsPermissionSet permission) {
try {
// create the exclude list
List<CmsUUID> excludeList = new ArrayList<CmsUUID>();
if (permission != null) {
excludeList.add(principal);
}
// get the stored resource
OpenCmsTestResourceStorageEntry storedResource = m_currentResourceStrorage.get(resourceName);
String noMatches = compareAccessLists(cms, resourceName, storedResource, excludeList);
// now see if we have collected any no-matches
if (noMatches.length() > 0) {
fail("error comparing permission sets of resource "
+ resourceName
+ " with stored values: "
+ noMatches);
}
if (permission != null) {
CmsAccessControlList resAcls = cms.getAccessControlList(resourceName);
Map<CmsUUID, CmsPermissionSetCustom> permissionMap = resAcls.getPermissionMap();
CmsPermissionSet resPermission = permissionMap.get(principal);
if (resPermission != null) {
if (!resPermission.equals(permission)) {
fail("[Permission set not equal " + principal + ":" + permission + " != " + resPermission + "]");
}
} else {
fail("[Permission set not found " + principal + ":" + permission + "]");
}
}
} catch (Exception e) {
fail("cannot read resource " + resourceName + " " + e.getMessage());
}
}
/**
* Compares an access control list of a resource with a given access control permission.<p>
*
* @param cms the CmsObject
* @param modifiedResource the name of the resource which had its permissions changed
* @param resourceName the name of the resource to compare
* @param principal the principal of the permission set, or null to compare only with the stored values
* @param permission the permission set to compare
*/
public void assertAcl(
CmsObject cms,
String modifiedResource,
String resourceName,
CmsUUID principal,
CmsPermissionSet permission) {
try {
// create the exclude list
List<CmsUUID> excludeList = new ArrayList<CmsUUID>();
if (permission != null) {
excludeList.add(principal);
}
// TODO: This is the code to recalculate the permission set if necessary. It is not completed yet!
Map<CmsUUID, String> parents = getParents(cms, resourceName);
List<CmsAccessControlEntry> aceList = cms.getAccessControlEntries(resourceName);
Iterator<CmsAccessControlEntry> i = aceList.iterator();
while (i.hasNext()) {
CmsAccessControlEntry ace = i.next();
if (ace.getPrincipal().equals(principal)) {
String parent = parents.get(ace.getResource());
if ((!parent.equals(modifiedResource)) && (parent.length() > modifiedResource.length())) {
permission = new CmsPermissionSet(ace.getAllowedPermissions(), ace.getDeniedPermissions());
}
}
}
// get the stored resource
OpenCmsTestResourceStorageEntry storedResource = m_currentResourceStrorage.get(resourceName);
String noMatches = compareAccessLists(cms, resourceName, storedResource, excludeList);
// now see if we have collected any no-matches
if (noMatches.length() > 0) {
fail("error comparing permission sets of resource "
+ resourceName
+ " with stored values: "
+ noMatches);
}
if (permission != null) {
CmsAccessControlList resAcls = cms.getAccessControlList(resourceName);
Map<CmsUUID, CmsPermissionSetCustom> permissionMap = resAcls.getPermissionMap();
CmsPermissionSet resPermission = permissionMap.get(principal);
if (resPermission != null) {
if (!resPermission.equals(permission)) {
fail("[Permission set not equal " + principal + ":" + permission + " != " + resPermission + "]");
}
} else {
fail("[Permission set not found " + principal + ":" + permission + "]");
}
}
} catch (Exception e) {
fail("cannot read resource " + resourceName + " " + e.getMessage());
}
}
/**
* Tests if a pattern can be found in a content string.<p>
* Fails if the pattern is not found.
*
* @param content the content string
* @param pattern the pattern to search for
*/
public void assertContains(String content, String pattern) {
if (content.toLowerCase().indexOf(pattern.toLowerCase()) == -1) {
fail("pattern '" + pattern + "' not found in content");
}
}
/**
* Tests if a pattern cannot be found in a content string.<p>
* Fails if the pattern is found.
*
* @param content the content string
* @param pattern the pattern to search for
*/
public void assertContainsNot(String content, String pattern) {
if (content.toLowerCase().indexOf(pattern.toLowerCase()) != -1) {
fail("pattern '" + pattern + "' found in content");
}
}
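/*
* Illustrative usage of the two pattern checks above; the resource path is a placeholder
* and the patterns depend on the content actually written by the test:
*
*   String content = new String(cms.readFile("/folder1/page1.html", CmsResourceFilter.ALL).getContents());
*   assertContains(content, "<title>");
*   assertContainsNot(content, "unexpected marker");
*/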
/**
* Compares the current content of a (file) resource with a given content.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param content the content to compare
*/
public void assertContent(CmsObject cms, String resourceName, byte[] content) {
try {
// get the actual resource from the vfs
CmsFile file = cms.readFile(resourceName, CmsResourceFilter.ALL);
byte[] fileContent = file.getContents();
if (fileContent.length != file.getLength()) {
fail("[Content length stored " + file.getContents().length + " != " + file.getLength() + "]");
}
if (fileContent.length != content.length) {
fail("[Content length compared " + file.getContents().length + " != " + content.length + "]");
}
for (int i = 0; i < content.length; i++) {
if (fileContent[i] != content[i]) {
fail("[Content compare failed at index " + i + "]");
}
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
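/*
* Illustrative usage, assuming the test has previously written exactly these bytes to the
* (placeholder) resource "/folder1/page1.html":
*
*   byte[] expected = "<html>test body</html>".getBytes();
*   assertContent(cms, "/folder1/page1.html", expected);
*/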
/**
* Tests if the current content date of a resource is equal to the given date.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param dateContent the content date
*/
public void assertDateContent(CmsObject cms, String resourceName, long dateContent) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (res.getDateContent() != dateContent) {
fail("[DateContent "
+ dateContent
+ " i.e. "
+ CmsDateUtil.getHeaderDate(dateContent)
+ " != "
+ res.getDateContent()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateContent())
+ "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Tests if the current content date of a resource is later than the given date.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param dateContent the content date
*/
public void assertDateContentAfter(CmsObject cms, String resourceName, long dateContent) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (res.getDateContent() < dateContent) {
fail("[DateContent "
+ dateContent
+ " i.e. "
+ CmsDateUtil.getHeaderDate(dateContent)
+ " > "
+ res.getDateContent()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateContent())
+ "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Compares the current date created of a resource with a given date.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param dateCreated the creation date
*/
public void assertDateCreated(CmsObject cms, String resourceName, long dateCreated) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (res.getDateCreated() != dateCreated) {
fail("[DateCreated "
+ dateCreated
+ " i.e. "
+ CmsDateUtil.getHeaderDate(dateCreated)
+ " != "
+ res.getDateCreated()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateCreated())
+ "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Tests if the creation date of a resource is later than a given date.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param dateCreated the creation date
*/
public void assertDateCreatedAfter(CmsObject cms, String resourceName, long dateCreated) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (res.getDateCreated() < dateCreated) {
fail("[DateCreated "
+ dateCreated
+ " i.e. "
+ CmsDateUtil.getHeaderDate(dateCreated)
+ " > "
+ res.getDateCreated()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateCreated())
+ "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Compares the current expiration date of a resource with a given date.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param dateExpired the expiration date
*/
public void assertDateExpired(CmsObject cms, String resourceName, long dateExpired) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (res.getDateExpired() != dateExpired) {
fail("[DateExpired "
+ dateExpired
+ " i.e. "
+ CmsDateUtil.getHeaderDate(dateExpired)
+ " != "
+ res.getDateExpired()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateExpired())
+ "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Compares the current date last modified of a resource with a given date.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param dateLastModified the last modification date
*/
public void assertDateLastModified(CmsObject cms, String resourceName, long dateLastModified) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (res.getDateLastModified() != dateLastModified) {
fail("[DateLastModified "
+ dateLastModified
+ " i.e. "
+ CmsDateUtil.getHeaderDate(dateLastModified)
+ " != "
+ res.getDateLastModified()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateLastModified())
+ "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Tests if the current date last modified of a resource is later than the given date.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param dateLastModified the last modification date
*/
public void assertDateLastModifiedAfter(CmsObject cms, String resourceName, long dateLastModified) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (res.getDateLastModified() < dateLastModified) {
fail("[DateLastModified "
+ dateLastModified
+ " i.e. "
+ CmsDateUtil.getHeaderDate(dateLastModified)
+ " > "
+ res.getDateLastModified()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateLastModified())
+ "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Compares the current release date of a resource with a given date.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param dateReleased the release date
*/
public void assertDateReleased(CmsObject cms, String resourceName, long dateReleased) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (res.getDateReleased() != dateReleased) {
fail("[DateReleased "
+ dateReleased
+ " i.e. "
+ CmsDateUtil.getHeaderDate(dateReleased)
+ " != "
+ res.getDateReleased()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateReleased())
+ "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Tests if the given exceptions are equal (or both null).<p>
*
* @param e1 first exception to compare
* @param e2 second exception to compare
*/
public void assertEquals(CmsException e1, CmsException e2) {
if ((e1 == null) && (e2 == null)) {
return;
}
if (((e1 == null) && (e2 != null)) || ((e1 != null) && (e2 == null))) {
fail("Exceptions not equal (not both null)");
}
if ((e1 != null) && (e2 != null)) {
if (!(e1.getClass().equals(e2.getClass()))) {
fail("Exception " + e1.toString() + " does not equal " + e2.toString());
}
if (!(e1.getMessageContainer().getKey().equals(e2.getMessageContainer().getKey()))) {
fail("Exception " + e1.toString() + " does not equal " + e2.toString());
}
}
}
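/*
* Illustrative usage: run the same failing operation twice (reading a non-existent resource,
* path is a placeholder) and verify both attempts fail in the same way:
*
*   CmsException first = null;
*   CmsException second = null;
*   try { cms.readResource("/does/not/exist"); } catch (CmsException e) { first = e; }
*   try { cms.readResource("/does/not/exist"); } catch (CmsException e) { second = e; }
*   assertEquals(first, second);
*/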
/**
* Tests if the given jobs are internally equal.<p>
* (May have different wrapper classes)
*
* @param j1 first job to compare
* @param j2 second job to compare
* @param comparePublishLists if the publish lists should be compared, too
* @param compareTime if the timestamps should be compared, too
*/
public void assertEquals(
CmsPublishJobBase j1,
CmsPublishJobBase j2,
boolean comparePublishLists,
boolean compareTime) {
CmsPublishJobInfoBean job1 = new OpenCmsTestPublishJobBase(j1).getInfoBean();
CmsPublishJobInfoBean job2 = new OpenCmsTestPublishJobBase(j2).getInfoBean();
if (!(job1.getPublishHistoryId().equals(job2.getPublishHistoryId())
&& job1.getProjectName().equals(job2.getProjectName())
&& job1.getUserId().equals(job2.getUserId())
&& job1.getLocale().equals(job2.getLocale())
&& (job1.getFlags() == job2.getFlags()) && (job1.getSize() == job2.getSize()))) {
fail("Publish jobs are not equal");
}
if (compareTime) {
if (!((job1.getEnqueueTime() == job2.getEnqueueTime())
&& (job1.getStartTime() == job2.getStartTime())
&& (job1.getFinishTime() == job2.getFinishTime()))) {
fail("Publish jobs do not have the same timestamps");
}
}
if (comparePublishLists) {
if (!job1.getPublishList().toString().equals(job2.getPublishList().toString())) {
fail("Publish jobs do not have the same publish list");
}
}
}
/**
* Tests if the given xml document objects are equal (or both null).<p>
*
* @param d1 first document to compare
* @param d2 second document to compare
*/
public void assertEquals(Document d1, Document d2) {
if ((d1 == null) && (d2 == null)) {
return;
}
if (((d1 == null) && (d2 != null)) || ((d1 != null) && (d2 == null))) {
fail("Documents not equal (not both null)");
}
if ((d1 != null) && (d2 != null)) {
InternalNodeComparator comparator = new InternalNodeComparator();
if (comparator.compare((Node)d1, (Node)d2) != 0) {
fail("Comparison of documents failed: "
+ "name = "
+ d1.getName()
+ ", "
+ "path = "
+ comparator.m_node1.getUniquePath()
+ "\nNode 1:"
+ comparator.m_node1.asXML()
+ "\nNode 2:"
+ comparator.m_node2.asXML());
}
}
}
/**
* Compares a given resource to its stored version containing the state before a CmsObject
* method was called.<p>
*
* @param cms the CmsObject
* @param resource the resource to compare
* @param filter the filter containing the flags defining which attributes to compare
*/
public void assertFilter(CmsObject cms, CmsResource resource, OpenCmsTestResourceFilter filter) {
try {
// get the stored resource
OpenCmsTestResourceStorageEntry storedResource = m_currentResourceStrorage.get(cms.getSitePath(resource));
// compare the current resource with the stored resource
assertFilter(cms, storedResource, resource, filter);
} catch (Exception e) {
fail("cannot read resource " + cms.getSitePath(resource) + " " + e.getMessage());
}
}
/**
* Compares a stored Cms resource with another Cms resource instance using a specified filter.<p>
*
* @param cms the current user's Cms object
* @param storedResource a stored Cms resource representing the state before an operation
* @param res a Cms resource representing the state after an operation
* @param filter a filter to compare both resources
*/
public void assertFilter(
CmsObject cms,
OpenCmsTestResourceStorageEntry storedResource,
CmsResource res,
OpenCmsTestResourceFilter filter) {
String noMatches = null;
String resourceName = null;
try {
noMatches = "";
resourceName = cms.getRequestContext().removeSiteRoot(res.getRootPath());
// compare the contents if necessary
if (filter.testContents()) {
byte[] contents;
// we only have to do this when comparing files
if (res.isFile()) {
contents = cms.readFile(resourceName, CmsResourceFilter.ALL).getContents();
if (!new String(storedResource.getContents()).equals(new String(contents))) {
noMatches += "[Content does not match]\n";
}
contents = null;
}
}
// compare the date content if necessary
if (filter.testDateContent()) {
if (storedResource.getDateContent() != res.getDateContent()) {
noMatches += "[DateContent "
+ storedResource.getDateContent()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(storedResource.getDateContent())
+ " != "
+ res.getDateContent()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateContent())
+ "]\n";
}
}
// compare the date created if necessary
if (filter.testDateCreated()) {
if (storedResource.getDateCreated() != res.getDateCreated()) {
noMatches += "[DateCreated "
+ storedResource.getDateCreated()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(storedResource.getDateCreated())
+ " != "
+ res.getDateCreated()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateCreated())
+ "]\n";
}
}
if (filter.testDateCreatedSec()) {
if ((storedResource.getDateCreated() / 1000) != (res.getDateCreated() / 1000)) {
noMatches += "[DateCreated "
+ storedResource.getDateCreated()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(storedResource.getDateCreated())
+ " != "
+ res.getDateCreated()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateCreated())
+ "]\n";
}
}
// compare the date expired if necessary
if (filter.testDateExpired()) {
if (storedResource.getDateExpired() != res.getDateExpired()) {
noMatches += "[DateExpired "
+ storedResource.getDateExpired()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(storedResource.getDateExpired())
+ " != "
+ res.getDateExpired()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateExpired())
+ "]\n";
}
}
// compare the date last modified if necessary
if (filter.testDateLastModified()) {
if (storedResource.getDateLastModified() != res.getDateLastModified()) {
noMatches += "[DateLastModified "
+ storedResource.getDateLastModified()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(storedResource.getDateLastModified())
+ " != "
+ res.getDateLastModified()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateLastModified())
+ "]\n";
}
}
if (filter.testDateLastModifiedSec()) {
if ((storedResource.getDateLastModified() / 1000) != (res.getDateLastModified() / 1000)) {
noMatches += "[DateLastModified "
+ storedResource.getDateLastModified()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(storedResource.getDateLastModified())
+ " != "
+ res.getDateLastModified()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateLastModified())
+ "]\n";
}
}
// compare the date last released if necessary
if (filter.testDateReleased()) {
if (storedResource.getDateReleased() != res.getDateReleased()) {
noMatches += "[DateReleased "
+ storedResource.getDateReleased()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(storedResource.getDateReleased())
+ " != "
+ res.getDateReleased()
+ " i.e. "
+ CmsDateUtil.getHeaderDate(res.getDateReleased())
+ "]\n";
}
}
// compare the flags if necessary
if (filter.testFlags()) {
if (storedResource.getFlags() != res.getFlags()) {
noMatches += "[Flags " + storedResource.getFlags() + " != " + res.getFlags() + "]\n";
}
}
// compare the length if necessary
if (filter.testLength()) {
if (storedResource.getLength() != res.getLength()) {
noMatches += "[Length " + storedResource.getLength() + " != " + res.getLength() + "]\n";
}
}
// compare the sibling count if necessary
if (filter.testSiblingCount()) {
if (storedResource.getSiblingCount() != res.getSiblingCount()) {
noMatches += "[SiblingCount "
+ storedResource.getSiblingCount()
+ " != "
+ res.getSiblingCount()
+ "]\n";
}
}
// compare the lockstate if necessary
if (filter.testLock()) {
CmsLock resLock = cms.getLock(res);
if (filter.testName()) {
if (!storedResource.getLock().equals(resLock)) {
noMatches += "[Lockstate " + storedResource.getLock() + " != " + resLock + "]\n";
}
} else {
CmsLock other = storedResource.getLock();
if (!other.getUserId().equals(resLock.getUserId())
|| !other.getProjectId().equals(resLock.getProjectId())
|| !other.getType().equals(resLock.getType())) {
noMatches += "[Lockstate " + storedResource.getLock() + " != " + resLock + "]\n";
}
}
}
// compare the name if necessary
if (filter.testName()) {
if (!storedResource.getName().equals(res.getName())) {
noMatches += "[Name " + storedResource.getName() + " != " + res.getName() + "]\n";
}
}
// compare the project last modified if necessary
if (filter.testProjectLastModified()) {
if (!storedResource.getProjectLastModified().equals(res.getProjectLastModified())) {
noMatches += "[ProjectLastModified "
+ storedResource.getProjectLastModified()
+ " != "
+ res.getProjectLastModified()
+ "]\n";
}
}
// compare the properties if necessary
if (filter.testProperties()) {
noMatches += compareProperties(cms, resourceName, storedResource, null);
}
// compare the acl if necessary
if (filter.testAcl()) {
// compare the ACLs
noMatches += compareAccessLists(cms, resourceName, storedResource, null);
}
// compare the ace if necessary
if (filter.testAce()) {
// compare the ACEs
noMatches += compareAccessEntries(cms, resourceName, storedResource, null);
}
// compare the resource id if necessary
if (filter.testResourceId()) {
if (!storedResource.getResourceId().equals(res.getResourceId())) {
noMatches += "[ResourceId " + storedResource.getResourceId() + " != " + res.getResourceId() + "]\n";
}
}
// compare the state if necessary
if (filter.testState()) {
if (!storedResource.getState().equals(res.getState())) {
noMatches += "[State " + storedResource.getState() + " != " + res.getState() + "]\n";
}
}
// compare the structure id if necessary
if (filter.testStructureId()) {
if (!storedResource.getStructureId().equals(res.getStructureId())) {
noMatches += "[StructureId "
+ storedResource.getStructureId()
+ " != "
+ res.getStructureId()
+ "]\n";
}
}
// compare the touched flag if necessary
if (filter.testTouched()) {
if (storedResource.isTouched() != res.isTouched()) {
noMatches += "[Touched " + storedResource.isTouched() + " != " + res.isTouched() + "]\n";
}
}
// compare the type if necessary
if (filter.testType()) {
if (storedResource.getType() != res.getTypeId()) {
noMatches += "[Type " + storedResource.getType() + " != " + res.getTypeId() + "]\n";
}
}
// compare the user created if necessary
if (filter.testUserCreated()) {
if (!storedResource.getUserCreated().equals(res.getUserCreated())) {
noMatches += createUserFailMessage(
cms,
"UserCreated",
storedResource.getUserCreated(),
res.getUserCreated());
noMatches += "\n";
}
}
// compare the user last modified if necessary
if (filter.testUserLastModified()) {
if (!storedResource.getUserLastModified().equals(res.getUserLastModified())) {
noMatches += createUserFailMessage(
cms,
"UserLastModified",
storedResource.getUserLastModified(),
res.getUserLastModified());
noMatches += "\n";
}
}
// now see if we have collected any no-matches
if (noMatches.length() > 0) {
fail("error comparing resource " + resourceName + " with stored values:\n" + noMatches);
}
} catch (CmsException e) {
fail("cannot assert filter " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Compares a resource to its stored version containing the state before a CmsObject
* method was called.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param filter the filter containing the flags defining which attributes to compare
*
* @throws CmsException if something goes wrong
*/
public void assertFilter(CmsObject cms, String resourceName, OpenCmsTestResourceFilter filter) throws CmsException {
// get the stored resource
OpenCmsTestResourceStorageEntry storedResource = null;
try {
storedResource = m_currentResourceStrorage.get(resourceName);
} catch (Exception e) {
fail(e.getMessage());
}
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
// compare the current resource with the stored resource
assertFilter(cms, storedResource, res, filter);
}
/**
* Compares a resource to another given resource using a specified filter.<p>
*
* @param cms the current user's Cms object
* @param resourceName1 resource #1
* @param resourceName2 resource #2
* @param filter the filter containing the flags defining which attributes to compare
*/
public void assertFilter(CmsObject cms, String resourceName1, String resourceName2, OpenCmsTestResourceFilter filter) {
try {
CmsResource res1 = cms.readResource(resourceName1, CmsResourceFilter.ALL);
CmsResource res2 = cms.readResource(resourceName2, CmsResourceFilter.ALL);
// a dummy storage entry gets created here to share existing code
OpenCmsTestResourceStorageEntry dummy = new OpenCmsTestResourceStorageEntry(cms, resourceName2, res2);
assertFilter(cms, dummy, res1, filter);
} catch (CmsException e) {
fail("cannot read either resource "
+ resourceName1
+ " or resource "
+ resourceName2
+ " "
+ CmsException.getStackTraceAsString(e));
}
}
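/*
* The filter comparisons above are typically used in a "store, operate, compare" pattern.
* A minimal sketch; the path is a placeholder and the filter constant is assumed to match
* the attributes the operation is allowed to change:
*
*   storeResources(cms, "/folder1/page1.html");
*   cms.lockResource("/folder1/page1.html");
*   cms.writePropertyObject("/folder1/page1.html",
*       new CmsProperty(CmsPropertyDefinition.PROPERTY_TITLE, "changed", null));
*   assertFilter(cms, "/folder1/page1.html", OpenCmsTestResourceFilter.FILTER_WRITEPROPERTY);
*/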
/**
* Tests whether a resource has currently a specified flag set.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param flag a flag to check
*/
public void assertFlags(CmsObject cms, String resourceName, int flag) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
// test if the specified flag is set
if (!((res.getFlags() & flag) > 0)) {
fail("[Flags (" + res.getFlags() + ") do not contain flag (" + flag + ")");
}
} catch (CmsException e) {
fail("Error reading resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Checks if the given resource has the correct history count, also
* check if all entries in the history can be read.<p>
*
* @param cms the current user OpenCms context
* @param resourcename the name of the resource to check the history for
* @param versionCount the expected version number of the resource
*
* @throws Exception if the test fails
*/
public void assertHistory(CmsObject cms, String resourcename, int versionCount) throws Exception {
CmsResource res = cms.readResource(resourcename, CmsResourceFilter.ALL);
// assert we have the right version number
assertEquals(versionCount, res.getVersion());
if (cms.getRequestContext().getCurrentProject().isOnlineProject()) {
// no additional test possible for the online project
return;
}
// read all available versions
List<I_CmsHistoryResource> versions = cms.readAllAvailableVersions(resourcename);
// new files have no history entry even though the version number may be greater than 1 for siblings
if (res.getState().isNew()) {
assertTrue(versions.isEmpty());
return;
}
// if the resource has not been published yet, the available versions will be one less
boolean unchanged = res.getState().isUnchanged();
// the list is sorted descending, i.e. the last version is first in the list
int count = versionCount - (unchanged ? 0 : 1);
Iterator<I_CmsHistoryResource> i = versions.iterator();
while (i.hasNext()) {
// walk through the list and read all version files
CmsResource hRes = (CmsResource)i.next();
if (hRes instanceof CmsHistoryFile) {
CmsFile hFile = cms.readFile(hRes);
assertEquals(count, hFile.getVersion());
} else {
assertEquals(count, hRes.getVersion());
}
count--;
}
// finally assert that the list size is equal to the history version count
assertEquals(versionCount - (unchanged ? 0 : 1), versions.size());
}
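/*
* Illustrative usage (path and count are placeholders): after a file has been changed and
* published twice since its creation, three versions are expected:
*
*   // ... edit and publish "/folder1/page1.html" two times ...
*   assertHistory(cms, "/folder1/page1.html", 3);
*/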
/**
* Checks if the given resource has the correct history count, also
* check if all entries in the history can be read.<p>
*
* Use this method only for resources that have been restored.<p>
*
* @param cms the current user OpenCms context
* @param resourcename the name of the resource to check the history for
* @param versionCount the expected version number of the resource
*
* @throws Exception if the test fails
*/
public void assertHistoryForRestored(CmsObject cms, String resourcename, int versionCount) throws Exception {
CmsResource res = cms.readResource(resourcename, CmsResourceFilter.ALL);
// assert we have the right version number
assertEquals(versionCount, res.getVersion());
if (cms.getRequestContext().getCurrentProject().isOnlineProject()) {
// no additional test possible for the online project
return;
}
// read all available versions
List<I_CmsHistoryResource> versions = cms.readAllAvailableVersions(resourcename);
// if the resource has not been published yet, the available versions will be one less
boolean unchanged = res.getState().isUnchanged();
// the list is sorted descending, i.e. the last version is first in the list
int count = versionCount - (unchanged ? 0 : 1);
Iterator<I_CmsHistoryResource> i = versions.iterator();
while (i.hasNext()) {
// walk through the list and read all version files
CmsResource hRes = (CmsResource)i.next();
CmsFile hFile = cms.readFile(hRes);
assertEquals(count, hFile.getVersion());
count--;
}
// finally assert that the list size is equal to the history version count
assertEquals(versionCount - (unchanged ? 0 : 1), versions.size());
}
/**
* Ensures that the given resource is a folder.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to check for a folder
*/
public void assertIsFolder(CmsObject cms, String resourceName) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (!res.isFolder()) {
fail("[Not a folder: " + resourceName + "]");
}
if (res.getLength() != -1) {
fail("[Folder length not -1: " + resourceName + "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Tests if the specified object is identical with another object.<p>
*
* @param o1 an object
* @param o2 another object
*/
public void assertIsIdentical(Object o1, Object o2) {
if (o1 != o2) {
fail("Object " + o1.toString() + " is not identical to " + o2.toString());
}
}
/**
* Tests if the specified object is not identical with another object.<p>
*
* @param o1 an object
* @param o2 another object
*/
public void assertIsNotIdentical(Object o1, Object o2) {
if (o1 == o2) {
fail("Object " + o1.toString() + " is identical to " + o2.toString());
}
}
/**
* Validates if a specified resource is somehow locked to the current user.<p>
*
* @param cms the current user's Cms object
* @param resourceName the name of the resource to validate
*/
public void assertLock(CmsObject cms, String resourceName) {
try {
// get the actual resource from the VFS
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
CmsLock lock = cms.getLock(res);
if (lock.isNullLock() || !lock.isOwnedBy(cms.getRequestContext().getCurrentUser())) {
fail("[Lock "
+ resourceName
+ " requires must be locked to user "
+ cms.getRequestContext().getCurrentUser().getId()
+ "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Validates if a specified resource has a lock of a given type for the current user.<p>
*
* @param cms the current user's Cms object
* @param resourceName the name of the resource to validate
* @param lockType the type of the lock
*
* @see CmsLockType
*/
public void assertLock(CmsObject cms, String resourceName, CmsLockType lockType) {
assertLock(cms, resourceName, lockType, cms.getRequestContext().getCurrentUser());
}
/**
* Validates if a specified resource has a lock of a given type and is locked for a principal.<p>
*
* @param cms the current user's Cms object
* @param resourceName the name of the resource to validate
* @param lockType the type of the lock
* @param user the user to check the lock with
*
* @see CmsLockType
*/
public void assertLock(CmsObject cms, String resourceName, CmsLockType lockType, CmsUser user) {
try {
// get the actual resource from the VFS
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
CmsLock lock = cms.getLock(res);
if (lockType.isUnlocked()) {
if (!lock.isNullLock()) {
fail("[Lock " + resourceName + " must be unlocked]");
}
} else if (lock.isNullLock() || (lock.getType() != lockType) || !lock.isOwnedBy(user)) {
fail("[Lock "
+ resourceName
+ " requires a lock of type "
+ lockType
+ " for user "
+ user.getId()
+ " ("
+ user.getName()
+ ") but has a lock of type "
+ lock.getType()
+ " for user "
+ lock.getUserId()
+ "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
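/*
* Illustrative usage of the lock assertions (resource path is a placeholder):
*
*   cms.lockResource("/folder1/page1.html");
*   assertLock(cms, "/folder1/page1.html", CmsLockType.EXCLUSIVE);
*   cms.unlockResource("/folder1/page1.html");
*   assertLock(cms, "/folder1/page1.html", CmsLockType.UNLOCKED);
*/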
/**
* Validates the project status of a resource,
* i.e. if a resource has a "red flag" or not.<p>
*
* @param cms the current user's Cms object
* @param resourceName the name of the resource to validate
* @param shouldHaveRedFlag true, if the resource should currently have a red flag
*/
public void assertModifiedInCurrentProject(CmsObject cms, String resourceName, boolean shouldHaveRedFlag) {
boolean hasRedFlag = false;
try {
// get the actual resource from the VFS
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
// the current resource has a red flag if its state is changed/new/deleted
hasRedFlag = !res.getState().isUnchanged();
// and if it was modified in the current project
hasRedFlag &= (res.getProjectLastModified().equals(cms.getRequestContext().getCurrentProject().getUuid()));
// and if it was modified by the current user
hasRedFlag &= (res.getUserLastModified().equals(cms.getRequestContext().getCurrentUser().getId()));
if (shouldHaveRedFlag && !hasRedFlag) {
// it should have a red flag, but it hasn't
fail("[HasRedFlag " + resourceName + " must have a red flag]");
} else if (hasRedFlag && !shouldHaveRedFlag) {
// it has a red flag, but it shouldn't
fail("[HasRedFlag " + resourceName + " must not have a red flag]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Asserts the given permission string with the access control entry for the given resource and principal.<p>
*
* @param cms the cms object
* @param resourceName the resource name
* @param principal the principal
* @param permissionString the permission string to compare
*
* @throws CmsException if something goes wrong
*/
public void assertPermissionString(
CmsObject cms,
String resourceName,
I_CmsPrincipal principal,
String permissionString) throws CmsException {
Iterator<CmsAccessControlEntry> it = cms.getAccessControlEntries(resourceName).iterator();
while (it.hasNext()) {
CmsAccessControlEntry ace = it.next();
if (ace.getPrincipal().equals(principal.getId())) {
assertEquals(permissionString, ace.getPermissions().getPermissionString()
+ ace.getInheritingString()
+ ace.getResponsibleString());
return;
}
}
if (permissionString != null) {
fail("Ace not found");
}
}
/**
* Compares the current project of a resource with a given CmsProject.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param project the project
*/
public void assertProject(CmsObject cms, String resourceName, CmsProject project) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (!res.getProjectLastModified().equals(project.getUuid())) {
fail("[ProjectLastModified " + project.getUuid() + " != " + res.getProjectLastModified() + "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Compares the current properties of a resource with the stored values and a given, changed property.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param property the changed property
*/
public void assertPropertyChanged(CmsObject cms, String resourceName, CmsProperty property) {
try {
// get the stored resource
OpenCmsTestResourceStorageEntry storedResource = m_currentResourceStrorage.get(resourceName);
// create the exclude list
List<CmsProperty> excludeList = new ArrayList<CmsProperty>();
excludeList.add(property);
String noMatches = compareProperties(cms, resourceName, storedResource, excludeList);
// now see if we have collected any no-matches
if (noMatches.length() > 0) {
fail("error comparing resource " + resourceName + " with stored values: " + noMatches);
}
// test if the property was already in the stored result
List<CmsProperty> storedProperties = storedResource.getProperties();
if (!storedProperties.contains(property)) {
fail("property not found in stored value: " + property);
}
// test if the value of the changed property is correct.
CmsProperty resourceProperty = cms.readPropertyObject(resourceName, property.getName(), false);
if (!resourceProperty.isIdentical(property)) {
fail("property is not identical :" + property + " != " + resourceProperty);
}
} catch (Exception e) {
fail("cannot read resource " + resourceName + " " + e.getMessage());
}
}
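/*
* Illustrative usage (path and value are placeholders): snapshot the resource, change a
* single property and verify that nothing else was modified:
*
*   storeResources(cms, "/folder1/page1.html");
*   CmsProperty title = new CmsProperty(CmsPropertyDefinition.PROPERTY_TITLE, "new title", null);
*   cms.lockResource("/folder1/page1.html");
*   cms.writePropertyObject("/folder1/page1.html", title);
*   assertPropertyChanged(cms, "/folder1/page1.html", title);
*/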
/**
* Compares the current properties of a resource with the stored values and a list of changed properties.<p>
*
* @param cms an initialized CmsObject
* @param resourceName the name of the resource to compare
* @param excludeList a list of CmsProperties to exclude
*/
public void assertPropertyChanged(CmsObject cms, String resourceName, List<CmsProperty> excludeList) {
try {
// get the stored resource
OpenCmsTestResourceStorageEntry storedResource = m_currentResourceStrorage.get(resourceName);
String noMatches = compareProperties(cms, resourceName, storedResource, excludeList);
// now see if we have collected any no-matches
if (noMatches.length() > 0) {
fail("error comparing resource " + resourceName + " with stored values: " + noMatches);
}
// test if the values of the changed properties are correct and if the properties
// were already in the stored result
String propertyNoMatches = "";
String storedNotFound = "";
Iterator<CmsProperty> i = excludeList.iterator();
List<CmsProperty> storedProperties = storedResource.getProperties();
while (i.hasNext()) {
CmsProperty property = i.next();
CmsProperty resourceProperty = cms.readPropertyObject(resourceName, property.getName(), false);
// test if the property has the same value
if (!resourceProperty.isIdentical(property)) {
propertyNoMatches += "[" + property + " != " + resourceProperty + "]";
}
// test if the property was already in the stored object
if (!storedProperties.contains(property)) {
storedNotFound += "[" + property + "]";
}
}
// now see if we have collected any property no-matches
if (propertyNoMatches.length() > 0) {
fail("error comparing properties for resource " + resourceName + ": " + propertyNoMatches);
}
// now see if we have collected any property not found in the stored original
if (storedNotFound.length() > 0) {
fail("properties not found in stored value: " + storedNotFound);
}
} catch (Exception e) {
fail("cannot read resource " + resourceName + " " + e.getMessage());
}
}
/**
* Tests if a property definition exists.<p>
*
* @param cms the CmsObject
* @param propertyDefinition the property definition
*/
public void assertPropertydefinitionExist(CmsObject cms, CmsPropertyDefinition propertyDefinition) {
try {
CmsPropertyDefinition prop = cms.readPropertyDefinition(propertyDefinition.getName());
if (prop != null) {
if (!prop.getName().equals(propertyDefinition.getName())) {
fail("propertsdefinitions do not match: " + prop + " != " + propertyDefinition);
}
} else {
fail("cannot read propertydefitnion" + propertyDefinition);
}
} catch (CmsException e) {
fail("cannot read propertydefitnion" + propertyDefinition + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Tests if the list of property definitions is identical to a given list, except for one excluded property definition.<p>
*
* @param cms the CmsObject
* @param propertyDefintions the list of property definitions
* @param exclude the property definition to exclude
*/
public void assertPropertydefinitions(
CmsObject cms,
List<CmsPropertyDefinition> propertyDefintions,
CmsPropertyDefinition exclude) {
try {
String noMatches = "";
List<CmsPropertyDefinition> allPropertydefintions = cms.readAllPropertyDefinitions();
noMatches += comparePropertydefintions(propertyDefintions, allPropertydefintions, exclude);
noMatches += comparePropertydefintions(allPropertydefintions, propertyDefintions, exclude);
if (noMatches.length() > 0) {
fail("missig propertydefintions: " + noMatches);
}
} catch (CmsException e) {
fail("cannot read propertydefitnions " + CmsException.getStackTraceAsString(e));
}
}
/**
* Compares the current properties of a resource with the stored values.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
*/
public void assertPropertyEqual(CmsObject cms, String resourceName) {
try {
// get the stored resource
OpenCmsTestResourceStorageEntry storedResource = m_currentResourceStrorage.get(resourceName);
String noMatches = compareProperties(cms, resourceName, storedResource, null);
// now see if we have collected any no-matches
if (noMatches.length() > 0) {
fail("error comparing resource " + resourceName + " with stored values: " + noMatches);
}
} catch (Exception e) {
fail("cannot read resource " + resourceName + " " + e.getMessage());
}
}
/**
* Compares the current properties of a resource with the stored values and a given, new property.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param property the new property
*/
public void assertPropertyNew(CmsObject cms, String resourceName, CmsProperty property) {
try {
// get the stored resource
OpenCmsTestResourceStorageEntry storedResource = m_currentResourceStrorage.get(resourceName);
// create the exclude list
List<CmsProperty> excludeList = new ArrayList<CmsProperty>();
excludeList.add(property);
String noMatches = compareProperties(cms, resourceName, storedResource, excludeList);
// now see if we have collected any no-matches
if (noMatches.length() > 0) {
fail("error comparing resource " + resourceName + " with stored values: " + noMatches);
}
// test if the property was already in the stored result
List<CmsProperty> storedProperties = storedResource.getProperties();
if (storedProperties.contains(property)) {
fail("property already found in stored value: " + property);
}
// test if the value of the changed property is correct.
CmsProperty resourceProperty = cms.readPropertyObject(resourceName, property.getName(), false);
if (!resourceProperty.isIdentical(property)) {
fail("property is not identical :" + property + " != " + resourceProperty);
}
} catch (Exception e) {
fail("cannot read resource " + resourceName + " " + e.getMessage());
}
}
/**
* Compares the current properties of a resource with the stored values and a list of new properties.<p>
*
* @param cms an initialized CmsObject
* @param resourceName the name of the resource to compare
* @param excludeList a list of CmsProperties to exclude
*/
public void assertPropertyNew(CmsObject cms, String resourceName, List excludeList) {
try {
// get the stored resource
OpenCmsTestResourceStorageEntry storedResource = m_currentResourceStrorage.get(resourceName);
String noMatches = compareProperties(cms, resourceName, storedResource, excludeList);
// now see if we have collected any no-matches
if (noMatches.length() > 0) {
fail("error comparing resource " + resourceName + " with stored values: " + noMatches);
}
// test if the values of the changed properties are correct and if the properties
// were already in the stored result
String propertyNoMatches = "";
String storedFound = "";
Iterator i = excludeList.iterator();
List<CmsProperty> storedProperties = storedResource.getProperties();
while (i.hasNext()) {
CmsProperty property = (CmsProperty)i.next();
CmsProperty resourceProperty = cms.readPropertyObject(resourceName, property.getName(), false);
// test if the property has the same value
if (!resourceProperty.isIdentical(property)) {
propertyNoMatches += "[" + property + " != " + resourceProperty + "]";
}
// test if the property was already in the stored object
if (storedProperties.contains(property)) {
storedFound += "[" + property + "]";
}
}
// now see if we have collected any property no-matches
if (propertyNoMatches.length() > 0) {
fail("error comparing properties for resource " + resourceName + ": " + propertyNoMatches);
}
// now see if we have collected any property already found in the stored original
if (storedFound.length() > 0) {
fail("properties already found in stored value: " + storedFound);
}
} catch (Exception e) {
fail("cannot read resource " + resourceName + " " + e.getMessage());
}
}
/**
* Compares the current properties of a resource with the stored values and a given, deleted property.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param property the deleted property
*/
public void assertPropertyRemoved(CmsObject cms, String resourceName, CmsProperty property) {
try {
// create the exclude list
List<CmsProperty> excludeList = new ArrayList<CmsProperty>();
excludeList.add(property);
// get the stored resource
OpenCmsTestResourceStorageEntry storedResource = m_currentResourceStrorage.get(resourceName);
String noMatches = compareProperties(cms, resourceName, storedResource, excludeList);
// now see if we have collected any no-matches
if (noMatches.length() > 0) {
fail("error comparing resource " + resourceName + " with stored values: " + noMatches);
}
// test if the property was already in the stored result
List<CmsProperty> storedProperties = storedResource.getProperties();
if (!storedProperties.contains(property)) {
fail("property not found in stored value: " + property);
}
// test if the property was really removed.
CmsProperty resourceProperty = cms.readPropertyObject(resourceName, property.getName(), false);
if (resourceProperty != CmsProperty.getNullProperty()) {
fail("property is not removed :" + property + " != " + resourceProperty);
}
} catch (Exception e) {
fail("cannot read resource " + resourceName + " " + e.getMessage());
}
}
/**
* Compares the current properties of a resource with the stored values and a list of deleted properties.<p>
*
* @param cms an initialized CmsObject
* @param resourceName the name of the resource to compare
* @param excludeList a list of CmsProperties to exclude
*/
public void assertPropertyRemoved(CmsObject cms, String resourceName, List excludeList) {
try {
// get the stored resource
OpenCmsTestResourceStorageEntry storedResource = m_currentResourceStrorage.get(resourceName);
String noMatches = compareProperties(cms, resourceName, storedResource, excludeList);
// now see if we have collected any no-matches
if (noMatches.length() > 0) {
fail("error comparing resource " + resourceName + " with stored values: " + noMatches);
}
// test if the properties were really deleted and if they
// were present in the stored result
String propertyNotDeleted = "";
String storedNotFound = "";
Iterator i = excludeList.iterator();
List<CmsProperty> storedProperties = storedResource.getProperties();
List<CmsProperty> resourceProperties = cms.readPropertyObjects(resourceName, false);
while (i.hasNext()) {
CmsProperty property = (CmsProperty)i.next();
// test if the property has the same value
if (resourceProperties.contains(property)) {
CmsProperty resourceProperty = cms.readPropertyObject(resourceName, property.getName(), false);
propertyNotDeleted += "[" + property + " != " + resourceProperty + "]";
}
// test if the property was already in the stored object
if (!storedProperties.contains(property)) {
storedNotFound += "[" + property + "]";
}
}
// now see if we have collected any property no-matches
if (propertyNotDeleted.length() > 0) {
fail("properties not deleted for " + resourceName + ": " + propertyNotDeleted);
}
// now see if we have collected any property not found in the stored original
if (storedNotFound.length() > 0) {
fail("properties not found in stored value: " + storedNotFound);
}
} catch (Exception e) {
fail("cannot read resource " + resourceName + " " + e.getMessage());
}
}
/**
* Asserts the equality of the two given relations.<p>
*
* @param expected the expected relation
* @param actual the actual result
*/
public void assertRelation(CmsRelation expected, CmsRelation actual) {
assertEquals(expected.getSourceId(), actual.getSourceId());
assertEquals(expected.getSourcePath(), actual.getSourcePath());
assertEquals(expected.getTargetId(), actual.getTargetId());
assertEquals(expected.getTargetPath(), actual.getTargetPath());
assertEquals(expected.getType(), actual.getType());
}
/**
* Compares the current resource id of a resource with a given id.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param resourceId the id
*/
public void assertResourceId(CmsObject cms, String resourceName, CmsUUID resourceId) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (!res.getResourceId().equals(resourceId)) {
fail("[ResourceId] " + resourceId + " != " + res.getResourceId() + "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Ensures that the given resource is of a certain type.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to check
* @param resourceType the resource type to check for
*/
public void assertResourceType(CmsObject cms, String resourceName, int resourceType) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (res.getTypeId() != resourceType) {
fail("[ResourceType " + res.getTypeId() + " != " + resourceType + "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Validates if the current sibling count of a resource matches the given number.<p>
*
* @param cms the current user's Cms object
* @param resourceName the name of the resource to compare
* @param count the expected sibling count
*/
public void assertSiblingCount(CmsObject cms, String resourceName, int count) {
try {
// get the current resource from the VFS
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (res.getSiblingCount() != count) {
fail("[SiblingCount " + res.getSiblingCount() + " != " + count + "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Validates if the current sibling count of a resource has been incremented
* compared to its previous sibling count.<p>
*
* @param cms the current user's Cms object
* @param resourceName the name of the resource to compare
* @param increment the number of additional siblings compared to the original state
*/
public void assertSiblingCountIncremented(CmsObject cms, String resourceName, int increment) {
try {
// get the current resource from the VFS
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
// get the previous resource from resource storage
OpenCmsTestResourceStorageEntry entry = m_currentResourceStrorage.get(resourceName);
if (res.getSiblingCount() != (entry.getSiblingCount() + increment)) {
fail("[SiblingCount "
+ res.getSiblingCount()
+ " != "
+ entry.getSiblingCount()
+ "+"
+ increment
+ "]");
}
} catch (Exception e) {
fail("cannot read resource " + resourceName + " " + e.getMessage());
}
}
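/*
* Illustrative usage (paths are placeholders, locking omitted for brevity): after creating
* one additional sibling, the stored sibling count should have grown by exactly one:
*
*   storeResources(cms, "/folder1/page1.html");
*   cms.createSibling("/folder1/page1.html", "/folder1/sibling1.html", null);
*   assertSiblingCountIncremented(cms, "/folder1/page1.html", 1);
*/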
/**
* Compares the current state of a resource with a given state.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param state the state
*/
public void assertState(CmsObject cms, String resourceName, CmsResourceState state) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (res.getState() != state) {
fail("[State " + state + " != " + res.getState() + "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
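/*
* Illustrative usage (path is a placeholder): a freshly edited resource should be in the
* "changed" state and return to "unchanged" once it has been published:
*
*   assertState(cms, "/folder1/page1.html", CmsResource.STATE_CHANGED);
*   // ... publish the project ...
*   assertState(cms, "/folder1/page1.html", CmsResource.STATE_UNCHANGED);
*/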
/**
* Compares the current structure id of a resource with a given id.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param structureId the id
*/
public void assertStructureId(CmsObject cms, String resourceName, CmsUUID structureId) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (!res.getStructureId().equals(structureId)) {
fail("[StructureId] " + structureId + " != " + res.getStructureId() + "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Compares the current type of a resource with a given type.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param type the type
*/
public void assertType(CmsObject cms, String resourceName, int type) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (res.getTypeId() != type) {
fail("[State " + type + " != " + res.getTypeId() + "]");
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Compares the user who created a resource with a given user.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param user the creation user
*/
public void assertUserCreated(CmsObject cms, String resourceName, CmsUser user) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (!res.getUserCreated().equals(user.getId())) {
fail(createUserFailMessage(cms, "UserCreated", user.getId(), res.getUserLastModified()));
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Compares the current user last modified of a resource with a given user.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param user the last modification user
*/
public void assertUserLastModified(CmsObject cms, String resourceName, CmsUser user) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
if (!res.getUserLastModified().equals(user.getId())) {
fail(createUserFailMessage(cms, "UserLastModified", user.getId(), res.getUserLastModified()));
}
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Tests if the current version of a resource is equal to the given version number.<p>
*
* @param cms the CmsObject
* @param resourceName the name of the resource to compare
* @param version the version number to check
*/
public void assertVersion(CmsObject cms, String resourceName, int version) {
try {
// get the actual resource from the vfs
CmsResource res = cms.readResource(resourceName, CmsResourceFilter.ALL);
assertEquals("Version", version, res.getVersion());
} catch (CmsException e) {
fail("cannot read resource " + resourceName + " " + CmsException.getStackTraceAsString(e));
}
}
/**
* Creates a new storage object.<p>
* @param name the name of the storage
*/
public void createStorage(String name) {
OpenCmsTestResourceStorage storage = new OpenCmsTestResourceStorage(name);
m_resourceStorages.put(name, storage);
}
/**
* Should return the additional connection name.<p>
*
* @return the name of the additional connection
*/
public String getConnectionName() {
return "additional";
}
/**
* Returns the name of the database product.<p>
*
* @return returns either oracle or mysql
*/
public String getDatabaseProduct() {
return m_dbProduct;
}
/**
* Gets a precalculated resource state from the storage.<p>
*
* @param resourceName the name of the resource to get the state
* @return precalculated resource state
* @throws Exception in case something goes wrong
*/
public CmsResourceState getPreCalculatedState(String resourceName) throws Exception {
return m_currentResourceStrorage.getPreCalculatedState(resourceName);
}
/**
* Resets the mapping for resource names.<p>
*/
public void resetMapping() {
m_currentResourceStrorage.resetMapping();
}
/**
* Sets the mapping for resource names.<p>
*
* @param source the source resource name
* @param target the target resource name
*/
public void setMapping(String source, String target) {
m_currentResourceStrorage.setMapping(source, target);
}
/**
* Stores the state (e.g. attributes, properties, content, lock state and ACL) of
* a resource in the internal resource storage.<p>
*
* If the resourceName is the name of a folder in the vfs, all subresources are stored as well.
*
* @param cms an initialized CmsObject
* @param resourceName the name of the resource in the vfs
*/
public void storeResources(CmsObject cms, String resourceName) {
storeResources(cms, resourceName, true);
}
/**
* Stores the state (e.g. attributes, properties, content, lock state and ACL) of
* a resource in the internal resource storage.<p>
*
* If the resourceName is the name of a folder in the vfs and storeSubresources is true,
* all subresources are stored as well.
*
* @param cms an initialized CmsObject
* @param resourceName the name of the resource in the vfs
* @param storeSubresources indicates to store subresources of folders
*/
public void storeResources(CmsObject cms, String resourceName, boolean storeSubresources) {
String resName = "";
try {
CmsResource resource = cms.readResource(resourceName, CmsResourceFilter.ALL);
// test if the name belongs to a file or folder
if (resource.isFile()) {
m_currentResourceStrorage.add(cms, resourceName, resource);
} else {
// this is a folder, so first add the folder itself to the storage
m_currentResourceStrorage.add(cms, resourceName
+ (resourceName.charAt(resourceName.length() - 1) != '/' ? "/" : ""), resource);
if (!storeSubresources) {
return;
}
// now get all subresources and add them as well
List resources = cms.readResources(resourceName, CmsResourceFilter.ALL);
Iterator i = resources.iterator();
while (i.hasNext()) {
CmsResource res = (CmsResource)i.next();
resName = cms.getSitePath(res);
m_currentResourceStrorage.add(cms, resName, res);
}
}
} catch (CmsException e) {
fail("cannot read resource "
+ resourceName
+ " or "
+ resName
+ " "
+ CmsException.getStackTraceAsString(e));
}
}
/**
* Switches the internal resource storage.<p>
* @param name the name of the storage
* @throws CmsException if the storage was not found
*/
public void switchStorage(String name) throws CmsException {
OpenCmsTestResourceStorage storage = m_resourceStorages.get(name);
if (storage != null) {
m_currentResourceStrorage = storage;
} else {
throw new CmsException(Messages.get().container(Messages.ERR_RESOURCE_STORAGE_NOT_FOUND_0));
}
}
/**
* Deletes the given file from the rfs.<p>
*
* @param absolutePath the absolute path of the file
*/
protected void deleteFile(String absolutePath) {
try {
// sleep 0.5 seconds - sometimes deletion does not work if not waiting
Thread.sleep(500);
} catch (InterruptedException e) {
// ignore
}
File file = new File(absolutePath);
if (file.exists()) {
if (!file.delete()) {
file.deleteOnExit();
}
}
}
/**
* Writes a message to the current output stream.<p>
*
* @param message the message to write
*/
protected void echo(String message) {
try {
System.out.println();
m_shell.printPrompt();
System.out.println(message);
} catch (Throwable t) {
throw new RuntimeException(t);
}
}
/**
* Returns an initialized CmsObject with admin user permissions,
* running in the "/sites/default" site root.<p>
*
* @return an initialized CmsObject with admin user permissions
* @throws CmsException in case of OpenCms access errors
*/
protected CmsObject getCmsObject() throws CmsException {
// log in the Admin user and switch to the setup project
CmsObject cms = OpenCms.initCmsObject(OpenCms.getDefaultUsers().getUserGuest());
cms.loginUser("Admin", "admin");
// switch to the "Offline" project
cms.getRequestContext().setCurrentProject(cms.readProject("Offline"));
cms.getRequestContext().setSiteRoot("/sites/default/");
// init the storage
createStorage(OpenCmsTestResourceStorage.DEFAULT_STORAGE);
switchStorage(OpenCmsTestResourceStorage.DEFAULT_STORAGE);
// return the initialized cms context Object
return cms;
}
/**
* Imports a module (zipfile) from the default module directory,
* creating a temporary project for this.<p>
*
* @param importFile the name of the import module located in the default module directory
*
* @throws Exception if something goes wrong
*
* @see org.opencms.importexport.CmsImportExportManager#importData(CmsObject, I_CmsReport, CmsImportParameters)
*/
protected void importModuleFromDefault(String importFile) throws Exception {
String exportPath = OpenCms.getSystemInfo().getPackagesRfsPath();
String fileName = OpenCms.getSystemInfo().getAbsoluteRfsPathRelativeToWebInf(
exportPath + CmsSystemInfo.FOLDER_MODULES + importFile);
CmsImportParameters params = new CmsImportParameters(fileName, "/", true);
OpenCms.getImportExportManager().importData(
getCmsObject(),
new CmsShellReport(getCmsObject().getRequestContext().getLocale()),
params);
}
/**
* Removes and deletes a storage object.<p>
* @param name the name of the storage
*/
protected void removeStorage(String name) {
OpenCmsTestResourceStorage storage = m_resourceStorages.get(name);
if (storage != null) {
m_resourceStorages.remove(name);
storage = null;
}
}
/**
* Restarts the cms.<p>
*/
protected void restart() {
OpenCmsTestLogAppender.setBreakOnError(false);
// output a message
System.out.println("\n\n\n----- Restarting shell -----");
m_shell.exit();
m_shell = new CmsShell(getTestDataPath("WEB-INF" + File.separator), null, null, "${user}@${project}>", null);
OpenCmsTestLogAppender.setBreakOnError(true);
}
/**
* Compares two vectors of access entries and creates a list of all access control entries which are
* not matching and are not included in a separate exclude list.
* @param cms the CmsObject
* @param resourceName the name of the resource the properties belong to
* @param storedResource the stored resource corresponding to the resourcename
* @param excludeList the list of access entries to exclude in the test or null
* @return string of non matching access entries
* @throws CmsException if something goes wrong
*/
private String compareAccessEntries(
CmsObject cms,
String resourceName,
OpenCmsTestResourceStorageEntry storedResource,
List excludeList) throws CmsException {
String noMatches = "";
List resAce = cms.getAccessControlEntries(resourceName);
List storedAce = storedResource.getAccessControlEntries();
List unmatchedAce;
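// compare in both directions so that entries missing from either list are reported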
unmatchedAce = compareAce(resAce, storedAce, excludeList);
if (unmatchedAce.size() > 0) {
noMatches += "[ACE missing " + unmatchedAce.toString() + "]\n";
}
unmatchedAce = compareAce(storedAce, resAce, excludeList);
if (unmatchedAce.size() > 0) {
noMatches += "[ACE missing " + unmatchedAce.toString() + "]\n";
}
return noMatches;
}
/**
* Compares two access lists and creates a list of permission sets which are
* not matching and are not included in a separate exclude list.
* @param cms the CmsObject
* @param resourceName the name of the resource the properties belong to
* @param storedResource the stored resource corresponding to the resourcename
* @param excludeList the list of permission sets to exclude in the test or null
* @return string of non matching access list entries
* @throws CmsException if something goes wrong
*/
private String compareAccessLists(
CmsObject cms,
String resourceName,
OpenCmsTestResourceStorageEntry storedResource,
List excludeList) throws CmsException {
String noMatches = "";
CmsAccessControlList resList = cms.getAccessControlList(resourceName);
CmsAccessControlList storedList = storedResource.getAccessControlList();
List unmatchedList;
unmatchedList = compareList(resList, storedList, excludeList);
if (unmatchedList.size() > 0) {
noMatches += "[ACL differences " + unmatchedList.toString() + "]\n";
}
unmatchedList = compareList(storedList, resList, excludeList);
if (unmatchedList.size() > 0) {
noMatches += "[ACL differences " + unmatchedList.toString() + "]\n";
}
return noMatches;
}
/**
* Compares two vectors of access control entries.<p>
*
* @param source the source vector to compare
* @param target the destination vector to compare
* @param exclude the exclude list
* @return list of non matching access control entries
*/
private List compareAce(List source, List target, List exclude) {
boolean isOverwriteAll = false;
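// check if the target contains an 'overwrite all' entry; if so, unmatched entries are not reported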
Iterator itTargets = target.iterator();
while (itTargets.hasNext()) {
CmsAccessControlEntry ace = (CmsAccessControlEntry)itTargets.next();
if (ace.isOverwriteAll()) {
isOverwriteAll = true;
}
}
List result = new ArrayList();
Iterator i = source.iterator();
while (i.hasNext()) {
CmsAccessControlEntry ace = (CmsAccessControlEntry)i.next();
// here would be best to check the path of the overwrite all entry
// but since we have just the resource id, instead of the structure id
// we are not able to do that here :(
if (!target.contains(ace) && !isOverwriteAll) {
result.add(ace);
}
}
// finally match the result list with the exclude list
if (exclude != null) {
Iterator l = exclude.iterator();
while (l.hasNext()) {
CmsAccessControlEntry excludeAce = (CmsAccessControlEntry)l.next();
if (result.contains(excludeAce)) {
result.remove(excludeAce);
}
}
}
return result;
}
/**
* Compares two lists of permission sets.<p>
* @param source the source list to compare
* @param target the destination list to compare
* @param exclude the exclude list
* @return list of non matching permission sets
*/
private List compareList(CmsAccessControlList source, CmsAccessControlList target, List exclude) {
boolean isOverwriteAll = false;
Iterator itTargets = target.getPermissionMap().keySet().iterator();
while (itTargets.hasNext()) {
CmsUUID principalId = (CmsUUID)itTargets.next();
if (principalId.equals(CmsAccessControlEntry.PRINCIPAL_OVERWRITE_ALL_ID)) {
isOverwriteAll = true;
}
}
HashMap result = new HashMap();
Map destinationMap = target.getPermissionMap();
Map sourceMap = source.getPermissionMap();
Iterator i = sourceMap.entrySet().iterator();
while (i.hasNext()) {
Map.Entry entry = (Map.Entry)i.next();
CmsUUID key = (CmsUUID)entry.getKey();
CmsPermissionSet value = (CmsPermissionSet)entry.getValue();
if (destinationMap.containsKey(key)) {
CmsPermissionSet destValue = (CmsPermissionSet)destinationMap.get(key);
if (!destValue.equals(value)) {
result.put(key, key + " " + value + " != " + destValue);
}
} else if (!isOverwriteAll) {
// here would be best to check the path of the overwrite all entry
// but since we have just the resource id, instead of the structure id
// we are not able to do that here :(
result.put(key, "missing " + key);
}
}
// finally match the result list with the exclude list
if (exclude != null) {
Iterator l = exclude.iterator();
while (l.hasNext()) {
CmsUUID excludeUUID = (CmsUUID)l.next();
if (result.containsKey(excludeUUID)) {
result.remove(excludeUUID);
}
}
}
return new ArrayList(result.values());
}
/**
* Compares two lists of property definitions, excluding a given property definition.
* @param source the source list of property definitions
* @param target the target list of property definitions
* @param exclude the property definition to exclude
* @return String of missing property definitions
*/
private String comparePropertydefintions(List source, List target, CmsPropertyDefinition exclude) {
String noMatches = "";
Iterator i = source.iterator();
while (i.hasNext()) {
CmsPropertyDefinition prop = (CmsPropertyDefinition)i.next();
if ((!target.contains(prop)) && (!prop.getName().equals(exclude.getName()))) {
noMatches += "[" + prop + "]";
}
}
return noMatches;
}
/**
* Creates a user compare fail message.<p>
*
* @param cms the current OpenCms user context
* @param message the message to show
* @param user1 the id of the first (expected) user
* @param user2 the id of the second (found) user
* @return a user compare fail message
*
* @throws CmsException if one of the users can't be read
*/
private String createUserFailMessage(CmsObject cms, String message, CmsUUID user1, CmsUUID user2)
throws CmsException {
StringBuffer result = new StringBuffer();
result.append("[");
result.append(message);
result.append(" (");
result.append(cms.readUser(user1).getName());
result.append(") ");
result.append(user1);
result.append(" != (");
result.append(cms.readUser(user2).getName());
result.append(") ");
result.append(user2);
result.append("]");
return result.toString();
}
/**
* Creates a map of all parent resources of an OpenCms resource.<p>
* The resource UUID is used as key, the full resource path is used as the value.
*
* @param cms the CmsObject
* @param resourceName the name of the resource to get the parent map from
* @return HashMap of parent resources
*/
private Map<CmsUUID, String> getParents(CmsObject cms, String resourceName) {
HashMap<CmsUUID, String> parents = new HashMap<CmsUUID, String>();
List<CmsResource> parentResources = new ArrayList<CmsResource>();
try {
// get all parent folders of the current file
parentResources = cms.readPath(resourceName, CmsResourceFilter.IGNORE_EXPIRATION);
} catch (CmsException e) {
// ignore
}
Iterator<CmsResource> k = parentResources.iterator();
while (k.hasNext()) {
// add the current folder to the map
CmsResource curRes = k.next();
parents.put(curRes.getResourceId(), curRes.getRootPath());
}
return parents;
}
/**
* Initializes the OpenCms/database configuration
* by reading the appropriate values from opencms.properties.<p>
*/
private void initConfiguration() {
if (m_configuration == null) {
initTestDataPath();
m_configuration = OpenCmsTestProperties.getInstance().getConfiguration();
m_dbProduct = OpenCmsTestProperties.getInstance().getDbProduct();
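// collect all additional test data paths configured with a numeric index suffix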
int index = 0;
boolean cont;
do {
cont = false;
if (m_configuration.containsKey(OpenCmsTestProperties.PROP_TEST_DATA_PATH + "." + index)) {
addTestDataPath(m_configuration.get(OpenCmsTestProperties.PROP_TEST_DATA_PATH + "." + index));
cont = true;
index++;
}
} while (cont);
String propertyFile = "";
try {
propertyFile = getTestDataPath("WEB-INF/config." + m_dbProduct + "/opencms.properties");
m_configuration = new CmsParameterConfiguration(propertyFile);
} catch (IOException e) {
fail("Error while reading configuration from '" + propertyFile + "'\n" + e.toString());
return;
}
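// read the JDBC connection data for the "setup", "default" and (optional) additional connection pools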
String key = "setup";
m_setupConnection = new ConnectionData();
m_setupConnection.m_dbName = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL + "." + key + "." + "dbName");
m_setupConnection.m_jdbcUrl = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL + "." + key + "." + "jdbcUrl");
m_setupConnection.m_userName = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL + "." + key + "." + "user");
m_setupConnection.m_userPassword = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ "password");
m_setupConnection.m_jdbcDriver = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ CmsDbPool.KEY_JDBC_DRIVER);
m_setupConnection.m_jdbcUrl = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ CmsDbPool.KEY_JDBC_URL);
m_setupConnection.m_jdbcUrlParams = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ CmsDbPool.KEY_JDBC_URL_PARAMS);
key = "default";
m_defaultConnection = new ConnectionData();
m_defaultConnection.m_dbName = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL + "." + key + "." + "dbName");
m_defaultConnection.m_userName = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ CmsDbPool.KEY_USERNAME);
m_defaultConnection.m_userPassword = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ CmsDbPool.KEY_PASSWORD);
m_defaultConnection.m_jdbcDriver = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ CmsDbPool.KEY_JDBC_DRIVER);
m_defaultConnection.m_jdbcUrl = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ CmsDbPool.KEY_JDBC_URL);
m_defaultConnection.m_jdbcUrlParams = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ CmsDbPool.KEY_JDBC_URL_PARAMS);
key = getConnectionName();
if (m_configuration.get(CmsDbPool.KEY_DATABASE_POOL + "." + key + "." + "dbName") != null) {
m_additionalConnection = new ConnectionData();
m_additionalConnection.m_dbName = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ "dbName");
m_additionalConnection.m_userName = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ CmsDbPool.KEY_USERNAME);
m_additionalConnection.m_userPassword = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ CmsDbPool.KEY_PASSWORD);
m_additionalConnection.m_jdbcDriver = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ CmsDbPool.KEY_JDBC_DRIVER);
m_additionalConnection.m_jdbcUrl = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ CmsDbPool.KEY_JDBC_URL);
m_additionalConnection.m_jdbcUrlParams = m_configuration.get(CmsDbPool.KEY_DATABASE_POOL
+ "."
+ key
+ "."
+ CmsDbPool.KEY_JDBC_URL_PARAMS);
}
m_defaultTablespace = m_configuration.get("db.oracle.defaultTablespace");
m_indexTablespace = m_configuration.get("db.oracle.indexTablespace");
m_tempTablespace = m_configuration.get("db.oracle.temporaryTablespace");
System.out.println("----- Starting tests on database "
+ m_dbProduct
+ " ("
+ m_setupConnection.m_jdbcUrl
+ ") "
+ "-----");
}
}
}
| Changed catch block. | test/org/opencms/test/OpenCmsTestCase.java | Changed catch block. | <ide><path>est/org/opencms/test/OpenCmsTestCase.java
<ide> try {
<ide> propertyFile = getTestDataPath("WEB-INF/config." + m_dbProduct + "/opencms.properties");
<ide> m_configuration = new CmsParameterConfiguration(propertyFile);
<del> } catch (IOException e) {
<add> } catch (Exception e) {
<ide> fail("Error while reading configuration from '" + propertyFile + "'\n" + e.toString());
<ide> return;
<ide> } |
|
Java | mit | c6c78a232e2fa812346aa6b9c19f785166d7189d | 0 | OrrinWick/Lab6 | /*
* Copyright (c) 2010, Oracle.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the distribution.
* * Neither the name of Oracle nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* changing things up :)
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
* TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/* Anagram Game Application */
package com.toy.anagrams.lib;
/**
* Implementation of the logic for the Anagram Game application.
* New comment line apparently :P
*/
final class StaticWordLibrary extends WordLibrary {
private static final String[] WORD_LIST = {
"abstraction",
"ambiguous",
"arithmetic",
"backslash",
"bitmap",
"circumstance",
"combination",
"consequently",
"consortium",
"decrementing",
"dependency",
"disambiguate",
"dynamic",
"encapsulation",
"equivalent",
"expression",
"facilitate",
"fragment",
"hexadecimal",
"implementation",
"indistinguishable",
"inheritance",
"internet",
"java",
"localization",
"microprocessor",
"navigation",
"optimization",
"parameter",
"patrick",
"pickle",
"polymorphic",
"rigorously",
"simultaneously",
"specification",
"structure",
"lexical",
"likewise",
"management",
"manipulate",
"mathematics",
"hotjava",
"vertex",
"unsigned",
"traditional"};
private static final String[] SCRAMBLED_WORD_LIST = {
"batsartcoin",
"maibuguos",
"ratimhteci",
"abkclssha",
"ibmtpa",
"iccrmutsnaec",
"ocbmnitaoni",
"ocsnqeeutnyl",
"ocsnroitmu",
"edrcmeneitgn",
"edepdnneyc",
"idasbmgiauet",
"ydanicm",
"neacsplutaoni",
"qeiuaveltn",
"xerpseisno",
"aficilatet",
"rfgaemtn",
"ehaxedicalm",
"milpmeneatitno",
"niidtsniugsiahleb",
"niehiratcen",
"nietnret",
"ajav",
"olacilazitno",
"imrcpoorecssro",
"anivagitno",
"poitimazitno",
"aparemert",
"aprtcki",
"ipkcel",
"opylomprich",
"irogorsuyl",
"isumtlnaoesuyl",
"psceficitaoni",
"tsurtcreu",
"elixalc",
"ilekiwse",
"amanegemtn",
"aminupalet",
"amhtmetacsi",
"ohjtvaa",
"evtrxe",
"nuisngde",
"rtdatioialn"
};
final static WordLibrary DEFAULT = new StaticWordLibrary();
/**
* Singleton class.
*/
private StaticWordLibrary() {
}
/**
* Gets the word at a given index.
* @param idx index of required word
* @return word at that index in its natural form
*/
public String getWord(int idx) {
return WORD_LIST[idx];
}
/**
* Gets the word at a given index in its scrambled form.
* @param idx index of required word
* @return word at that index in its scrambled form
*/
public String getScrambledWord(int idx) {
return SCRAMBLED_WORD_LIST[idx];
}
/**
* Gets the number of words in the library.
* @return the total number of plain/scrambled word pairs in the library
*/
public int getSize() {
return WORD_LIST.length;
}
/**
* Checks whether a user's guess for a word at the given index is correct.
* @param idx index of the word guessed
* @param userGuess the user's guess for the actual word
* @return true if the guess was correct; false otherwise
*/
public boolean isCorrect(int idx, String userGuess) {
return userGuess.equals(getWord(idx));
}
}
| src/com/toy/anagrams/lib/StaticWordLibrary.java | /*
* Copyright (c) 2010, Oracle.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the distribution.
* * Neither the name of Oracle nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
* TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/* Anagram Game Application */
package com.toy.anagrams.lib;
/**
* Implementation of the logic for the Anagram Game application.
* New comment line apparently :P
*/
final class StaticWordLibrary extends WordLibrary {
private static final String[] WORD_LIST = {
"abstraction",
"ambiguous",
"arithmetic",
"backslash",
"bitmap",
"circumstance",
"combination",
"consequently",
"consortium",
"decrementing",
"dependency",
"disambiguate",
"dynamic",
"encapsulation",
"equivalent",
"expression",
"facilitate",
"fragment",
"hexadecimal",
"implementation",
"indistinguishable",
"inheritance",
"internet",
"java",
"localization",
"microprocessor",
"navigation",
"optimization",
"parameter",
"patrick",
"pickle",
"polymorphic",
"rigorously",
"simultaneously",
"specification",
"structure",
"lexical",
"likewise",
"management",
"manipulate",
"mathematics",
"hotjava",
"vertex",
"unsigned",
"traditional"};
private static final String[] SCRAMBLED_WORD_LIST = {
"batsartcoin",
"maibuguos",
"ratimhteci",
"abkclssha",
"ibmtpa",
"iccrmutsnaec",
"ocbmnitaoni",
"ocsnqeeutnyl",
"ocsnroitmu",
"edrcmeneitgn",
"edepdnneyc",
"idasbmgiauet",
"ydanicm",
"neacsplutaoni",
"qeiuaveltn",
"xerpseisno",
"aficilatet",
"rfgaemtn",
"ehaxedicalm",
"milpmeneatitno",
"niidtsniugsiahleb",
"niehiratcen",
"nietnret",
"ajav",
"olacilazitno",
"imrcpoorecssro",
"anivagitno",
"poitimazitno",
"aparemert",
"aprtcki",
"ipkcel",
"opylomprich",
"irogorsuyl",
"isumtlnaoesuyl",
"psceficitaoni",
"tsurtcreu",
"elixalc",
"ilekiwse",
"amanegemtn",
"aminupalet",
"amhtmetacsi",
"ohjtvaa",
"evtrxe",
"nuisngde",
"rtdatioialn"
};
final static WordLibrary DEFAULT = new StaticWordLibrary();
/**
* Singleton class.
*/
private StaticWordLibrary() {
}
/**
* Gets the word at a given index.
* @param idx index of required word
* @return word at that index in its natural form
*/
public String getWord(int idx) {
return WORD_LIST[idx];
}
/**
* Gets the word at a given index in its scrambled form.
* @param idx index of required word
* @return word at that index in its scrambled form
*/
public String getScrambledWord(int idx) {
return SCRAMBLED_WORD_LIST[idx];
}
/**
* Gets the number of words in the library.
* @return the total number of plain/scrambled word pairs in the library
*/
public int getSize() {
return WORD_LIST.length;
}
/**
* Checks whether a user's guess for a word at the given index is correct.
* @param idx index of the word guessed
* @param userGuess the user's guess for the actual word
* @return true if the guess was correct; false otherwise
*/
public boolean isCorrect(int idx, String userGuess) {
return userGuess.equals(getWord(idx));
}
}
| Update StaticWordLibrary.java
Changed online :P | src/com/toy/anagrams/lib/StaticWordLibrary.java | Update StaticWordLibrary.java | <ide><path>rc/com/toy/anagrams/lib/StaticWordLibrary.java
<ide> * contributors may be used to endorse or promote products derived
<ide> * from this software without specific prior written permission.
<ide> *
<add> * changing things up :)
<ide> * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
<ide> * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
<ide> * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
|
JavaScript | mit | 2720ace605974b6f2589d89010da871372f2b527 | 0 | pivotalexperimental/jelly,pivotalexperimental/jelly | if(!window.Jelly) Jelly = new Object();
(Jelly.defineAjaxWithJellyFunctions = function($) {
$.ajaxWithJelly = function(params) {
$.ajax($.ajaxWithJelly.params(params));
};
if ($.fn.ajaxForm) {
$.fn.ajaxFormWithJelly = function(params) {
this.ajaxForm($.ajaxWithJelly.params(params));
};
}
$.ajaxWithJelly.params = function(otherParams) {
otherParams = otherParams || {};
if (otherParams.type && otherParams.type != "GET") {
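// non-GET requests need the authenticity token merged into the request data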
otherParams['data'] = $.extend(otherParams['data'], {
authenticity_token: window._token
});
}
var observers = otherParams.observers || Jelly.observers;
return $.extend({
dataType: 'json',
cache: false,
success : function(callbacks) {
Jelly.notifyObservers.call(observers, callbacks);
}
}, otherParams);
};
})(jQuery);
| generators/jelly/templates/javascripts/ajax_with_jelly.js | if(!window.Jelly) Jelly = new Object();
(Jelly.defineAjaxWithJellyFunctions = function($) {
$.ajaxWithJelly = function(params) {
$.ajax($.ajaxWithJelly.params(params));
};
if ($.fn.ajaxForm) {
$.fn.ajaxFormWithJelly = function(params) {
this.ajaxForm($.ajaxWithJelly.params(params));
};
}
$.ajaxWithJelly.params = function(otherParams) {
otherParams = otherParams || {};
if (otherParams.type && otherParams.type != "GET") {
otherParams['data'] = $.extend(otherParams['data'], {
authenticity_token: window._token
});
}
var observers = otherParams.observers || Jelly.observers;
return $.extend({
dataType: 'json',
cache: false,
success : Jelly.notifyObservers.bind(observers)
}, otherParams);
};
})(jQuery);
| Going back to make things easier to test.
| generators/jelly/templates/javascripts/ajax_with_jelly.js | Going back to make things easier to test. | <ide><path>enerators/jelly/templates/javascripts/ajax_with_jelly.js
<ide> return $.extend({
<ide> dataType: 'json',
<ide> cache: false,
<del> success : Jelly.notifyObservers.bind(observers)
<add> success : function(callbacks) {
<add> Jelly.notifyObservers.call(observers, callbacks);
<add> }
<ide> }, otherParams);
<ide> };
<ide> })(jQuery); |
|
Java | apache-2.0 | 896f4e210ca64a64b661a4c84d078cfd7f8f8925 | 0 | GovernmentCommunicationsHeadquarters/Gaffer,gchq/Gaffer,GovernmentCommunicationsHeadquarters/Gaffer,gchq/Gaffer,GovernmentCommunicationsHeadquarters/Gaffer,gchq/Gaffer,GovernmentCommunicationsHeadquarters/Gaffer,gchq/Gaffer | package uk.gov.gchq.gaffer.accumulostore.operation.hdfs.reducer;
import org.apache.accumulo.core.data.ByteSequence;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Value;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Reducer;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import uk.gov.gchq.gaffer.accumulostore.key.AccumuloElementConverter;
import uk.gov.gchq.gaffer.accumulostore.key.MockAccumuloElementConverter;
import uk.gov.gchq.gaffer.accumulostore.utils.AccumuloStoreConstants;
import uk.gov.gchq.gaffer.commonutil.StringUtil;
import uk.gov.gchq.gaffer.commonutil.TestGroups;
import uk.gov.gchq.gaffer.store.schema.Schema;
import uk.gov.gchq.gaffer.store.schema.SchemaEdgeDefinition;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import static org.mockito.BDDMockito.given;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static uk.gov.gchq.gaffer.hdfs.operation.handler.job.factory.JobFactory.SCHEMA;
public class AccumuloKeyValueReducerTest {
@Before
@After
public void cleanUp() {
MockAccumuloElementConverter.cleanUp();
}
@Test
public void shouldGetGroupFromElementConverter() throws IOException, InterruptedException {
// Given
MockAccumuloElementConverter.mock = mock(AccumuloElementConverter.class);
final Key key = mock(Key.class);
final List<Value> values = Arrays.asList(mock(Value.class), mock(Value.class));
final Reducer.Context context = mock(Reducer.Context.class);
final Configuration conf = mock(Configuration.class);
final Schema schema = new Schema.Builder()
.edge(TestGroups.ENTITY, new SchemaEdgeDefinition())
.build();
final ByteSequence colFamData = mock(ByteSequence.class);
final byte[] colFam = StringUtil.toBytes(TestGroups.ENTITY);
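// stub the context and configuration so setup() loads the schema and the mock element converter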
given(context.getConfiguration()).willReturn(conf);
given(context.getCounter(any(), any())).willReturn(mock(Counter.class));
given(conf.get(SCHEMA)).willReturn(StringUtil.toString(schema.toCompactJson()));
given(conf.get(AccumuloStoreConstants.ACCUMULO_ELEMENT_CONVERTER_CLASS)).willReturn(MockAccumuloElementConverter.class.getName());
given(colFamData.getBackingArray()).willReturn(colFam);
given(key.getColumnFamilyData()).willReturn(colFamData);
given(MockAccumuloElementConverter.mock.getGroupFromColumnFamily(colFam)).willReturn(TestGroups.ENTITY);
final AccumuloKeyValueReducer reducer = new AccumuloKeyValueReducer();
reducer.setup(context);
// When
reducer.reduce(key, values, context);
// Then
verify(MockAccumuloElementConverter.mock, times(1)).getGroupFromColumnFamily(colFam);
}
} | store-implementation/accumulo-store/src/test/java/uk/gov/gchq/gaffer/accumulostore/operation/hdfs/reducer/AccumuloKeyValueReducerTest.java | package uk.gov.gchq.gaffer.accumulostore.operation.hdfs.reducer;
import org.apache.accumulo.core.data.ByteSequence;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Value;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.Reducer;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import uk.gov.gchq.gaffer.accumulostore.key.AccumuloElementConverter;
import uk.gov.gchq.gaffer.accumulostore.key.MockAccumuloElementConverter;
import uk.gov.gchq.gaffer.accumulostore.utils.AccumuloStoreConstants;
import uk.gov.gchq.gaffer.commonutil.StringUtil;
import uk.gov.gchq.gaffer.commonutil.TestGroups;
import uk.gov.gchq.gaffer.store.schema.Schema;
import uk.gov.gchq.gaffer.store.schema.SchemaEdgeDefinition;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import static org.mockito.BDDMockito.given;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static uk.gov.gchq.gaffer.hdfs.operation.handler.job.factory.JobFactory.SCHEMA;
public class AccumuloKeyValueReducerTest {
@Before
@After
public void cleanUp() {
MockAccumuloElementConverter.cleanUp();
}
@Test
public void shouldGetGroupFromColumnFamily() throws IOException, InterruptedException {
// Given
MockAccumuloElementConverter.mock = mock(AccumuloElementConverter.class);
final Key key = mock(Key.class);
final List<Value> values = Arrays.asList(mock(Value.class), mock(Value.class));
final Reducer.Context context = mock(Reducer.Context.class);
final Configuration conf = mock(Configuration.class);
final Schema schema = new Schema.Builder()
.edge(TestGroups.ENTITY, new SchemaEdgeDefinition())
.build();
final ByteSequence colFamData = mock(ByteSequence.class);
final byte[] colFam = StringUtil.toBytes(TestGroups.ENTITY);
given(context.getConfiguration()).willReturn(conf);
given(context.getCounter(any(), any())).willReturn(mock(Counter.class));
given(conf.get(SCHEMA)).willReturn(StringUtil.toString(schema.toCompactJson()));
given(conf.get(AccumuloStoreConstants.ACCUMULO_ELEMENT_CONVERTER_CLASS)).willReturn(MockAccumuloElementConverter.class.getName());
given(colFamData.getBackingArray()).willReturn(colFam);
given(key.getColumnFamilyData()).willReturn(colFamData);
given(MockAccumuloElementConverter.mock.getGroupFromColumnFamily(colFam)).willReturn(TestGroups.ENTITY);
final AccumuloKeyValueReducer reducer = new AccumuloKeyValueReducer();
reducer.setup(context);
// When
reducer.reduce(key, values, context);
// Then
verify(MockAccumuloElementConverter.mock, times(1)).getGroupFromColumnFamily(colFam);
}
} | gh-1535 - renamed method
| store-implementation/accumulo-store/src/test/java/uk/gov/gchq/gaffer/accumulostore/operation/hdfs/reducer/AccumuloKeyValueReducerTest.java | gh-1535 - renamed method | <ide><path>tore-implementation/accumulo-store/src/test/java/uk/gov/gchq/gaffer/accumulostore/operation/hdfs/reducer/AccumuloKeyValueReducerTest.java
<ide> }
<ide>
<ide> @Test
<del> public void shouldGetGroupFromColumnFamily() throws IOException, InterruptedException {
<add> public void shouldGetGroupFromElementConverter() throws IOException, InterruptedException {
<ide> // Given
<ide> MockAccumuloElementConverter.mock = mock(AccumuloElementConverter.class);
<ide> final Key key = mock(Key.class); |
|
Java | bsd-3-clause | 1e8a1ef1baece561ae08ce39898c7e052b06116a | 0 | NCIP/camod,NCIP/camod,NCIP/camod,NCIP/camod | /**
* @author sguruswami
*
* $Id: ViewModelAction.java,v 1.69 2009-06-01 17:02:46 pandyas Exp $
*
* $Log: not supported by cvs2svn $
* Revision 1.68 2009/05/20 17:16:34 pandyas
* modified for gforge #17325 Upgrade caMOD to use caBIO 4.x and EVS 4.x to get data
*
* Revision 1.67 2009/03/25 16:24:58 pandyas
* modified for #17833 Make sure all references to Tranplantation are properly named
*
* Revision 1.66 2009/03/13 17:03:46 pandyas
* modified for #19205 Sort therapies in the order they are entered
*
* Revision 1.65 2008/08/14 17:07:03 pandyas
* remove debug line
*
* Revision 1.64 2008/08/14 17:01:42 pandyas
* modified debug line to use log
*
* Revision 1.63 2008/08/01 14:15:10 pandyas
* Modifed to prevent SQL inject - added HTTP Header clean
* App scan performed on July 30, 2008
*
* Revision 1.62 2008/07/28 17:19:02 pandyas
* Modifed to prevent SQL inject - added HTTP Header
* App scan performed on July 24, 2008
*
* Revision 1.61 2008/07/21 18:08:31 pandyas
* Modified to prevent SQL injection
* Scan performed on July 21, 2008
*
* Revision 1.60 2008/07/17 19:05:26 pandyas
* Modified to clean header to prevent SQL injection/Cross-Site Scripting
* Scan performed on July 16, 2008 by IRT
*
* Revision 1.59 2008/06/30 18:18:28 pandyas
* Removed code originally added for security scan when it caused null pointer errors
*
* Revision 1.58 2008/06/30 15:29:05 pandyas
* Modified to prevent Cross-Site Scripting
* Cleaned parameter name before proceeding
* Fixed code added in previous version
*
* Revision 1.57 2008/05/27 14:36:40 pandyas
* Modified to prevent SQL injection
* Cleaned HTTP Header before proceeding
* Re: Apps Scan run 05/23/2008
*
* Revision 1.56 2008/02/05 17:10:09 pandyas
* Removed debug statement for build to dev
*
* Revision 1.55 2008/02/05 17:09:34 pandyas
* Removed debug statement for build to dev
*
* Revision 1.54 2008/01/31 22:27:52 pandyas
* remove log printouts now that bug is resolved
*
* Revision 1.53 2008/01/31 22:23:22 pandyas
* remove log printouts now that bug is resolved
*
* Revision 1.52 2008/01/31 17:09:54 pandyas
* Modified to send new gene identifier (entrez gene id) to caBIO from new object location
*
* Revision 1.51 2008/01/28 18:45:18 pandyas
* Modified to debug caBIO data not returning to caMOD on dev
*
* Revision 1.50 2008/01/16 20:09:31 pandyas
* removed caBIO logging so the page renders when connection to caBIO fails
*
* Revision 1.49 2008/01/16 18:29:57 pandyas
* Renamed value to Transplant for #8290
*
* Revision 1.48 2008/01/10 15:55:01 pandyas
* modify output for final dev deployment
*
* Revision 1.47 2008/01/02 17:57:44 pandyas
* modified for #816 Connection to caELMIR - retrieve data for therapy search page
*
* Revision 1.46 2007/12/27 22:32:33 pandyas
* Modified for feature #8816 Connection to caELMIR - retrieve data for therapy search page
* Also added code to display Therapy link when only caELMIR data is available for a study
*
* Revision 1.45 2007/12/27 21:44:00 pandyas
* re-commit - changes did not show up in project
*
* Revision 1.44 2007/12/18 13:31:32 pandyas
* Added populate method for study data from caELMIRE for integration of Therapy study data
*
* Revision 1.43 2007/12/17 18:03:22 pandyas
* Removed * in searchFilter used for getting e-mail from LDAP
* Apps Support ticket was submitted (31169 - incorrect e-mail associated with my caMOD account) stating:
*
* Cheryl Marks submitted a ticket to NCICB Application Support in which she requested that the e-mail address associated with her account in the "User Settings" screen in caMOD be corrected. She has attempted to correct it herself, but because the program queries the LDAP Server for the e-mail address, her corrections were not retained.
*
* Revision 1.42 2007/12/04 13:49:19 pandyas
* Modified code for #8816 Connection to caELMIR - retrieve data for therapy search page
*
* Revision 1.41 2007/11/25 23:34:23 pandyas
* Initial version for feature #8816 Connection to caELMIR - retrieve data for therapy search page
*
* Revision 1.40 2007/10/31 18:39:30 pandyas
* Fixed #8188 Rename UnctrlVocab items to text entries
* Fixed #8290 Rename graft object into transplant object
*
* Revision 1.39 2007/09/14 18:53:37 pandyas
* Fixed Bug #8954: link to invivo detail page does not work
*
* Revision 1.38 2007/09/12 19:36:40 pandyas
* modified debug statements for build to stage tier
*
* Revision 1.37 2007/08/07 19:49:46 pandyas
* Removed reference to Transplant as per VCDE comments and after modification to object definition for CDE
*
* Revision 1.36 2007/08/07 18:26:20 pandyas
* Renamed to GRAFT as per VCDE comments
*
* Revision 1.35 2007/07/31 12:02:55 pandyas
* VCDE silver level and caMOD 2.3 changes
*
* Revision 1.34 2007/06/19 20:42:59 pandyas
* Users not logged in can not access the session property to check the model species. Therefore, we must show the attribute for all models.
*
* Revision 1.33 2007/06/19 18:39:21 pandyas
* Constant for species common name needs to be set for viewModelCharacteristics so it shows up for Zebrafish models
*
* Revision 1.32 2006/08/17 18:10:44 pandyas
* Defect# 410: Externalize properties files - Code changes to get properties
*
* Revision 1.31 2006/05/24 18:37:27 georgeda
* Workaround for bug in caBIO
*
* Revision 1.30 2006/05/09 18:57:54 georgeda
* Changes for searching on transient interfaces
*
* Revision 1.29 2006/05/08 13:43:15 georgeda
* Reformat and clean up warnings
*
* Revision 1.28 2006/04/19 19:31:58 georgeda
* Fixed display issue w/ GeneDelivery
*
* Revision 1.27 2006/04/19 18:50:01 georgeda
* Fixed issue w/ engineered genes displaying
*
* Revision 1.26 2006/04/17 19:09:41 pandyas
* caMod 2.1 OM changes
*
* Revision 1.25 2005/11/21 18:38:31 georgeda
* Defect #35. Trim whitespace from items that are freeform text
*
* Revision 1.24 2005/11/15 22:13:46 georgeda
* Cleanup of drug screening
*
* Revision 1.23 2005/11/14 14:21:44 georgeda
* Added sorting and spontaneous mutation
*
* Revision 1.22 2005/11/11 18:39:30 georgeda
* Removed unneeded call
*
* Revision 1.21 2005/11/10 22:07:36 georgeda
* Fixed part of bug #21
*
* Revision 1.20 2005/11/10 18:12:23 georgeda
* Use constant
*
* Revision 1.19 2005/11/07 13:57:39 georgeda
* Minor tweaks
*
* Revision 1.18 2005/11/03 15:47:11 georgeda
* Fixed slow invivo results
*
* Revision 1.17 2005/10/27 18:13:48 guruswas
* Show all publications in the publications display page.
*
* Revision 1.16 2005/10/20 21:35:37 georgeda
* Fixed xenograft display bug
*
* Revision 1.15 2005/10/19 18:56:00 guruswas
* implemented invivo details page
*
* Revision 1.14 2005/10/11 18:15:25 georgeda
* More comment changes
*
* Revision 1.13 2005/10/10 14:12:24 georgeda
* Changes for comment curation
*
* Revision 1.12 2005/10/07 21:15:03 georgeda
* Added caarray variables
*
* Revision 1.11 2005/10/06 13:37:01 georgeda
* Removed informational message
*
* Revision 1.10 2005/09/30 18:42:24 guruswas
* intial implementation of drug screening search and display page
*
* Revision 1.9 2005/09/22 21:34:51 guruswas
* First stab at carcinogenic intervention pages
*
* Revision 1.8 2005/09/22 15:23:41 georgeda
* Cleaned up warnings
*
* Revision 1.7 2005/09/21 21:02:24 guruswas
* Display the organ, disease names from NCI Thesaurus
*
* Revision 1.6 2005/09/21 20:47:16 georgeda
* Cleaned up
*
* Revision 1.5 2005/09/16 19:30:00 guruswas
* Display invivo data (from DTP) in the therapuetic approaches page
*
* Revision 1.4 2005/09/16 15:52:56 georgeda
* Changes due to manager re-write
*
*
*/
package gov.nih.nci.camod.webapp.action;
import edu.wustl.common.util.CaElmirInterfaceManager;
import gov.nih.nci.cabio.domain.Gene;
//import gov.nih.nci.cabio.domain.impl.GeneImpl;
import gov.nih.nci.camod.Constants;
import gov.nih.nci.camod.domain.Agent;
import gov.nih.nci.camod.domain.AnimalModel;
import gov.nih.nci.camod.domain.CaelmirStudyData;
import gov.nih.nci.camod.domain.CarcinogenExposure;
import gov.nih.nci.camod.domain.Comments;
import gov.nih.nci.camod.domain.EngineeredGene;
import gov.nih.nci.camod.domain.GeneIdentifier;
import gov.nih.nci.camod.domain.GenomicSegment;
import gov.nih.nci.camod.domain.Transplantation;
import gov.nih.nci.camod.domain.InducedMutation;
import gov.nih.nci.camod.domain.Person;
import gov.nih.nci.camod.domain.SpontaneousMutation;
import gov.nih.nci.camod.domain.TargetedModification;
import gov.nih.nci.camod.domain.Therapy;
import gov.nih.nci.camod.domain.Transgene;
import gov.nih.nci.camod.service.AgentManager;
import gov.nih.nci.camod.service.AnimalModelManager;
import gov.nih.nci.camod.service.CommentsManager;
import gov.nih.nci.camod.service.PersonManager;
import gov.nih.nci.camod.service.TransplantationManager;
import gov.nih.nci.camod.service.impl.QueryManagerSingleton;
import gov.nih.nci.camod.util.EvsTreeUtil;
import gov.nih.nci.camod.util.SafeHTMLUtil;
import gov.nih.nci.common.domain.DatabaseCrossReference;
//import gov.nih.nci.common.domain.impl.DatabaseCrossReferenceImpl;
import gov.nih.nci.system.applicationservice.ApplicationService;
import gov.nih.nci.system.applicationservice.CaBioApplicationService;
import gov.nih.nci.system.client.ApplicationServiceProvider;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.Vector;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
public class ViewModelAction extends BaseAction
{
/**
* sets the cancer model object in the session
*
* @param request
* the httpRequest
*/
private void setCancerModel(HttpServletRequest request)
{
String modelID = request.getParameter(Constants.Parameters.MODELID);
log.debug("<setCancerModel> modelID: " + modelID);
AnimalModelManager animalModelManager = (AnimalModelManager) getBean("animalModelManager");
AnimalModel am = null;
try
{
am = animalModelManager.get(modelID);
}
catch (Exception e)
{
log.error("Unable to get cancer model in setCancerModel");
e.printStackTrace();
}
request.getSession().setAttribute(Constants.ANIMALMODEL, am);
// Set model id to display on subViewModelMenu on left menu bar
request.getSession().setAttribute(Constants.MODELID, am.getId().toString());
}
/**
* sets the list of comments for the requested section in the request
*
* @param request
* the httpRequest
* @throws Exception
*/
private void setComments(HttpServletRequest request,
String inSection) throws Exception
{
String theCommentsId = request.getParameter(Constants.Parameters.COMMENTSID);
CommentsManager theCommentsManager = (CommentsManager) getBean("commentsManager");
log.debug("Comments id: " + theCommentsId);
List<Comments> theCommentsList = new ArrayList<Comments>();
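// a single comment id was passed in, so display only that comment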
if (theCommentsId != null && theCommentsId.length() > 0)
{
Comments theComments = theCommentsManager.get(theCommentsId);
if (theComments != null)
{
log.debug("Found a comment: " + theComments.getRemark());
theCommentsList.add(theComments);
}
}
// Get all comments that are either approved or owned by this user
else
{
PersonManager thePersonManager = (PersonManager) getBean("personManager");
Person theCurrentUser = thePersonManager.getByUsername((String) request.getSession().getAttribute(Constants.CURRENTUSER));
AnimalModel theAnimalModel = (AnimalModel) request.getSession().getAttribute(Constants.ANIMALMODEL);
theCommentsList = theCommentsManager.getAllBySection(inSection, theCurrentUser, theAnimalModel);
}
request.setAttribute(Constants.Parameters.COMMENTSLIST, theCommentsList);
}
/**
* @param mapping
* @param form
* @param request
* @param response
* @return
* @throws Exception
*/
public ActionForward populateModelCharacteristics(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response) throws Exception
{
request.getSession(true);
try {
// get and clean header to prevent SQL injection
String sID = null;
if (request.getHeader("X-Forwarded-For") != null){
sID = request.getHeader("X-Forwarded-For");
log.debug("cleaned X-Forwarded-For: " + sID);
sID = SafeHTMLUtil.clean(sID);
}
// get and clean header to prevent SQL injection
if (request.getHeader("Referer") != null){
sID = request.getHeader("Referer");
log.debug("cleaned Referer: " + sID);
sID = SafeHTMLUtil.clean(sID);
}
// Clean all headers for security scan (careful about what chars you allow)
String headername = "";
for(Enumeration e = request.getHeaderNames(); e.hasMoreElements();){
headername = (String)e.nextElement();
log.debug("populateModelCharacteristics headername: " + headername);
String cleanHeaders = SafeHTMLUtil.clean(headername);
log.debug("populateModelCharacteristics cleaned headername: " + headername);
}
// Get and clean method to prevent Cross-Site Scripting
String methodName = request.getParameter("unprotected_method");
log.info("methodName: " + methodName);
if (!methodName.equals("populateModelCharacteristics")){
methodName = SafeHTMLUtil.clean(methodName);
log.debug("cleaned methodName: " + methodName);
}
setCancerModel(request);
setComments(request, Constants.Pages.MODEL_CHARACTERISTICS);
// Call method so therapy link displays for models with caELMIR-only data
//caELMIR server went down and we experienced performance issues trying to connect
//populateCaelmirTherapyDetails(mapping, form, request, response);
}
catch (Exception e)
{
log.error("Error in populateModelCharacteristics", e);
}
return mapping.findForward("viewModelCharacteristics");
}
/**
*
*/
public ActionForward populateEngineeredGene(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response) throws Exception
{
log.debug("<populateEngineeredGene> modelID" + request.getParameter("aModelID"));
// Get and clean method to prevent Cross-Site Scripting
String methodName = request.getParameter("unprotected_method");
log.debug("methodName: " + methodName);
if (!methodName.equals("populateEngineeredGene")){
methodName = SafeHTMLUtil.clean(methodName);
log.debug("methodName: " + methodName);
}
String modelID = request.getParameter("aModelID");
AnimalModelManager animalModelManager = (AnimalModelManager) getBean("animalModelManager");
AnimalModel am = animalModelManager.get(modelID);
final Set egc = am.getEngineeredGeneCollection();
final int egcCnt = (egc != null) ? egc.size() : 0;
final List<EngineeredGene> tgc = new ArrayList<EngineeredGene>();
int tgCnt = 0;// Transgene
final List<EngineeredGene> gsc = new ArrayList<EngineeredGene>();
int gsCnt = 0;// GenomicSegment
final List<EngineeredGene> tmc = new ArrayList<EngineeredGene>();
int tmCnt = 0;// TargetedModification
final Map<Long, Gene> tmGeneMap = new HashMap<Long, Gene>();
final List<EngineeredGene> imc = new ArrayList<EngineeredGene>();
final List<SpontaneousMutation> smc = new ArrayList<SpontaneousMutation>(am.getSpontaneousMutationCollection());
Iterator it = egc.iterator();
int imCnt = 0;// InducedMutation
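// split the engineered genes into their concrete subtypes for display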
while (it.hasNext())
{
EngineeredGene eg = (EngineeredGene) it.next();
if (eg instanceof Transgene)
{
tgc.add(eg);
tgCnt++;
}
else if (eg instanceof GenomicSegment)
{
gsc.add(eg);
gsCnt++;
}
else if (eg instanceof TargetedModification)
{
tmc.add(eg);
tmCnt++;
// now go to caBIO and query the gene object....
TargetedModification tm = (TargetedModification) eg;
GeneIdentifier geneIdentifier = tm.getGeneIdentifier();
log.debug("geneIdentifier.getEntrezGeneID() " + geneIdentifier.getEntrezGeneID());
if (geneIdentifier != null)
{
log.debug("Connecting to caBIO to look up gene " + geneIdentifier);
// the geneId is available
try
{
CaBioApplicationService appService = (CaBioApplicationService)ApplicationServiceProvider.getApplicationService();
log.info("appService: " + appService.toString());
DatabaseCrossReference dcr = new DatabaseCrossReference();
dcr.setCrossReferenceId(geneIdentifier.getEntrezGeneID());
dcr.setType("gov.nih.nci.cabio.domain.Gene");
dcr.setDataSourceName("LOCUS_LINK_ID");
List<DatabaseCrossReference> cfcoll = new ArrayList<DatabaseCrossReference>();
cfcoll.add(dcr);
Gene myGene = new Gene();
myGene.setDatabaseCrossReferenceCollection(cfcoll);
List resultList = appService.search(Gene.class, myGene);
final int geneCount = (resultList != null) ? resultList.size() : 0;
//log.debug("Got " + geneCount + " Gene Objects");
if (geneCount > 0)
{
myGene = (Gene) resultList.get(0);
log.debug("Gene:" + geneIdentifier + " ==>" + myGene);
tmGeneMap.put(tm.getId(), myGene);
}
}
catch (Exception e)
{
log.error("ViewModelAction Unable to get information from caBIO", e);
}
}
}
else if (eg instanceof InducedMutation)
{
imc.add(eg);
imCnt++;
}
}
log.debug("<populateEngineeredGene> " + "egcCnt=" + egcCnt + "tgc=" + tgCnt + "gsc=" + gsCnt + "tmc=" + tmCnt + "imc=" + imCnt);
request.getSession().setAttribute(Constants.ANIMALMODEL, am);
request.getSession().setAttribute(Constants.TRANSGENE_COLL, tgc);
request.getSession().setAttribute(Constants.GENOMIC_SEG_COLL, gsc);
request.getSession().setAttribute(Constants.TARGETED_MOD_COLL, tmc);
request.getSession().setAttribute(Constants.TARGETED_MOD_GENE_MAP, tmGeneMap);
request.getSession().setAttribute(Constants.INDUCED_MUT_COLL, imc);
request.getSession().setAttribute(Constants.SPONTANEOUS_MUT_COLL, smc);
log.debug("<populateEngineeredGene> set attributes done.");
setComments(request, Constants.Pages.GENETIC_DESCRIPTION);
return mapping.findForward("viewGeneticDescription");
}
/**
* Populate the session and/or request with the objects necessary to display
* the page.
*
* @param mapping
* the struts action mapping
* @param form
* the web form
* @param request
* HTTPRequest
* @param response
* HTTPResponse
* @return
* @throws Exception
*/
public ActionForward populateCarcinogenicInterventions(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response) throws Exception
{
// Get and clean method to prevent Cross-Site Scripting
String methodName = request.getParameter("unprotected_method");
log.debug("methodName: " + methodName);
if (!methodName.equals("populateCarcinogenicInterventions")){
methodName = SafeHTMLUtil.clean(methodName);
log.debug("methodName: " + methodName);
}
setCancerModel(request);
String modelID = request.getParameter(Constants.Parameters.MODELID);
AnimalModelManager animalModelManager = (AnimalModelManager) getBean("animalModelManager");
AnimalModel am = animalModelManager.get(modelID);
final Set ceColl = am.getCarcinogenExposureCollection();
Iterator it = ceColl.iterator();
final Map<String, List<Object>> interventionTypeMap = new HashMap<String, List<Object>>();
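// group the carcinogen exposures by environmental factor type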
while (it.hasNext())
{
CarcinogenExposure ce = (CarcinogenExposure) it.next();
if (ce != null)
{
log.debug("Checking agent:" + ce.getEnvironmentalFactor().getNscNumber());
String theType = ce.getEnvironmentalFactor().getType();
if (theType == null || theType.length() == 0)
{
theType = ce.getEnvironmentalFactor().getTypeAlternEntry();
if (theType == null || theType.length() == 0)
{
theType = "Not specified";
}
}
List<Object> theTypeColl = interventionTypeMap.get(theType);
if (theTypeColl == null)
{
theTypeColl = new ArrayList<Object>();
interventionTypeMap.put(theType, theTypeColl);
}
theTypeColl.add(ce);
}
}
if (am.getGeneDeliveryCollection().size() > 0)
{
List<Object> theGeneDeliveryCollection = new ArrayList<Object>(am.getGeneDeliveryCollection());
interventionTypeMap.put("GeneDelivery", theGeneDeliveryCollection);
}
request.getSession().setAttribute(Constants.CARCINOGENIC_INTERVENTIONS_COLL, interventionTypeMap);
setComments(request, Constants.Pages.CARCINOGENIC_INTERVENTION);
return mapping.findForward("viewCarcinogenicInterventions");
}
/**
* Populate the session and/or request with the objects necessary to display
* the page.
*
* @param mapping
* the struts action mapping
* @param form
* the web form
* @param request
* HTTPRequest
* @param response
* HTTPResponse
* @return
* @throws Exception
*/
public ActionForward populatePublications(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response) throws Exception
{
// Get and clean method to prevent Cross-Site Scripting
String methodName = request.getParameter("unprotected_method");
log.debug("methodName: " + methodName);
if (!methodName.equals("populatePublications")){
methodName = SafeHTMLUtil.clean(methodName);
log.debug("methodName: " + methodName);
}
setCancerModel(request);
String modelID = request.getParameter("aModelID");
List pubs = null;
try
{
pubs = QueryManagerSingleton.instance().getAllPublications(Long.valueOf(modelID).longValue());
log.debug("pubs.size(): " + pubs.size());
}
catch (Exception e)
{
log.error("Unable to get publications");
e.printStackTrace();
}
request.getSession().setAttribute(Constants.PUBLICATIONS, pubs);
setComments(request, Constants.Pages.PUBLICATIONS);
return mapping.findForward("viewPublications");
}
/**
* Populate the session and/or request with the objects necessary to display
* the page.
*
* @param mapping
* the struts action mapping
* @param form
* the web form
* @param request
* HTTPRequest
* @param response
* HTTPResponse
* @return
* @throws Exception
*/
public ActionForward populateHistopathology(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response) throws Exception
{
// Get and clean method to prevent Cross-Site Scripting
String methodName = request.getParameter("unprotected_method");
log.debug("methodName: " + methodName);
if (!methodName.equals("populateHistopathology")){
methodName = SafeHTMLUtil.clean(methodName);
log.debug("methodName: " + methodName);
}
setCancerModel(request);
setComments(request, Constants.Pages.HISTOPATHOLOGY);
return mapping.findForward("viewHistopathology");
}
/**
* Populate the session and/or request with the objects necessary to display
* the page.
*
* @param mapping
* the struts action mapping
* @param form
* the web form
* @param request
* HTTPRequest
* @param response
* HTTPResponse
* @return
* @throws Exception
*/
public ActionForward populateTherapeuticApproaches(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response) throws Exception
{
log.info("<ViewModelAction> populateTherapeuticApproaches");
// Get and clean method to prevent Cross-Site Scripting
String methodName = request.getParameter("unprotected_method");
log.debug("methodName: " + methodName);
if (!methodName.equals("populateTherapeuticApproaches")){
methodName = SafeHTMLUtil.clean(methodName);
log.debug("methodName: " + methodName);
}
setCancerModel(request);
//
// query caBIO and load clinical protocols information
// store clinicalProtocol info in a hashmap keyed by NSC#
//
final HashMap<Long, Collection> clinProtocols = new HashMap<Long, Collection>();
final HashMap<Long, Collection> yeastResults = new HashMap<Long, Collection>();
final HashMap<Long, Collection> invivoResults = new HashMap<Long, Collection>();
final List<Therapy> therapeuticApprochesColl = new ArrayList<Therapy>();
String modelID = request.getParameter(Constants.Parameters.MODELID);
AnimalModelManager animalModelManager = (AnimalModelManager) getBean("animalModelManager");
AnimalModel am = animalModelManager.get(modelID);
final Set therapyColl = am.getTherapyCollection();
Iterator it = therapyColl.iterator();
final int cc = (therapyColl != null) ? therapyColl.size() : 0;
log.debug("Looking up clinical protocols for " + cc + " agents...");
while (it.hasNext())
{
Therapy t = (Therapy) it.next();
if (t != null)
{
therapeuticApprochesColl.add(t);
}
// Sort therapy in order entered as requested by user
Collections.sort(therapeuticApprochesColl);
log.debug("therapeuticApprochesColl: " + therapeuticApprochesColl.toString());
Agent a = t.getAgent();
AgentManager myAgentManager = (AgentManager) getBean("agentManager");
if (a != null)
{
Long nscNumber = a.getNscNumber();
if (nscNumber != null)
{
Collection protocols = myAgentManager.getClinicalProtocols(a);
clinProtocols.put(nscNumber, protocols);
// get the yeast data
List yeastStages = myAgentManager.getYeastResults(a, true);
if (yeastStages.size() > 0)
{
yeastResults.put(a.getId(), yeastStages);
}
// now get invivo/Transplantation data
List transplantationResults = QueryManagerSingleton.instance().getInvivoResults(a, true);
invivoResults.put(a.getId(), transplantationResults);
}
}
}
request.getSession().setAttribute(Constants.THERAPEUTIC_APPROACHES_COLL, therapeuticApprochesColl);
request.getSession().setAttribute(Constants.CLINICAL_PROTOCOLS, clinProtocols);
request.getSession().setAttribute(Constants.YEAST_DATA, yeastResults);
request.getSession().setAttribute(Constants.INVIVO_DATA, invivoResults);
setComments(request, Constants.Pages.THERAPEUTIC_APPROACHES);
//caELMIR server went down and we experienced performance issues trying to connect
//populateCaelmirTherapyDetails(mapping, form, request, response);
return mapping.findForward("viewTherapeuticApproaches");
}
/**
* Populate the session and/or request with the objects necessary to display
* the page.
*
* @param mapping
* the struts action mapping
* @param form
* the web form
* @param request
* HTTPRequest
* @param response
* HTTPResponse
* @return
* @throws Exception
*/
public ActionForward populateCaelmirTherapyDetails(ActionMapping mapping,
ActionForm form, HttpServletRequest request,
HttpServletResponse response) throws Exception {
log.debug("<ViewModelAction> populateCaelmirTherapyDetails Enter");
// Get and clean method to prevent Cross-Site Scripting
String methodName = request.getParameter("unprotected_method");
log.debug("methodName: " + methodName);
if (!methodName.equals("populateCaelmirTherapyDetails")){
methodName = SafeHTMLUtil.clean(methodName);
log.debug("methodName: " + methodName);
}
setCancerModel(request);
JSONArray jsonArray = new JSONArray();
JSONObject jobj = new JSONObject();
Vector h = new Vector();
ArrayList caelmirStudyData = new ArrayList();
String modelID = request.getParameter(Constants.Parameters.MODELID);
AnimalModelManager theAnimalModelManager = (AnimalModelManager) getBean("animalModelManager");
AnimalModel theAnimalModel = theAnimalModelManager.get(modelID);
try {
log.debug("<ViewModelAction> populateCaelmirTherapyDetails Enter try");
// Link to the interface provided by caElmir
URL url = new URL("http://chichen-itza.compmed.ucdavis.edu:8080/"
+ CaElmirInterfaceManager.getStudyInfoUrl());
// set your proxy server and port
//System.setProperty("proxyHost", "ptproxy.persistent.co.in");
//System.setProperty("proxyPort", "8080");
URLConnection urlConnection = url.openConnection();
//log.debug("populateCaelmirTherapyDetails open connection");
// Must be set to true so that we can write to the output stream; this
// allows data to be passed to the URL.
urlConnection.setDoOutput(true);
JSONObject jsonObj = new JSONObject();
// setting the model id.
jsonObj.put(CaElmirInterfaceManager.getModelIdParameter(), modelID);
PrintWriter out = new PrintWriter(urlConnection.getOutputStream());
out.write(jsonObj.toString());
out.flush();
out.close();
//log.debug("populateCaelmirTherapyDetails created JSONObject");
// start reading the response
BufferedReader bufferedReader = new BufferedReader(
new InputStreamReader(urlConnection.getInputStream()));
if (bufferedReader != null) {
String resultStr = (String) bufferedReader.readLine();
jsonArray = new JSONArray(resultStr);
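// The first element of the returned array holds the status message; study records start at index 1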
String status = null;
status = ((JSONObject) jsonArray.get(0)).get(
CaElmirInterfaceManager.getStatusMessageKey())
.toString();
//log.debug("populateCaelmirTherapyDetails status: " + status);
// Important: first check the status
if (!CaElmirInterfaceManager.getSuccessKey().equals(status)) {
// prints the status
log.info(status);
}
CaelmirStudyData studyData = new CaelmirStudyData();
// start reading study data from index 1
for (int i = 1; i < jsonArray.length(); i++) {
jobj = (JSONObject) jsonArray.get(i);
studyData = new CaelmirStudyData();
studyData.setDescription(jobj.getString(CaElmirInterfaceManager.getStudyDesrciptionKey()));
studyData.setEmail(jobj.getString(CaElmirInterfaceManager.getEmailKey()));
studyData.setHypothesis(jobj.getString(CaElmirInterfaceManager.getStudyHypothesisKey()));
studyData.setInstitution(jobj.getString(CaElmirInterfaceManager.getInstitutionKey()));
studyData.setInvestigatorName(jobj.getString(CaElmirInterfaceManager.getPrimaryInvestigatorKey()));
studyData.setStudyName(jobj.getString(CaElmirInterfaceManager.getStudyName()));
studyData.setUrl(jobj.getString(CaElmirInterfaceManager.getStudyUrlKey()));
caelmirStudyData.add(studyData);
}
}
} catch (MalformedURLException me) {
log.debug("MalformedURLException: " + me);
} catch (IOException ioe) {
log.debug("IOException: " + ioe);
}
// Set collection so therapy link will display if caELMIR data is available
// Needed for models with caELMIR data but no caMOD data
theAnimalModel.setCaelmirStudyDataCollection(caelmirStudyData);
request.getSession().setAttribute(Constants.CAELMIR_STUDY_DATA,
caelmirStudyData);
return mapping.findForward("viewTherapeuticApproaches");
}
/**
* Populate the session and/or request with the objects necessary to display
* the page.
*
* @param mapping
* the struts action mapping
* @param form
* the web form
* @param request
* HTTPRequest
* @param response
* HTTPResponse
* @return
* @throws Exception
*/
public ActionForward populateCellLines(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response) throws Exception
{
// Get and clean method to prevent Cross-Site Scripting
String methodName = request.getParameter("unprotected_method");
log.debug("methodName: " + methodName);
if (!methodName.equals("populateCellLines")){
methodName = SafeHTMLUtil.clean(methodName);
log.debug("methodName: " + methodName);
}
setCancerModel(request);
setComments(request, Constants.Pages.CELL_LINES);
return mapping.findForward("viewCellLines");
}
/**
* Populate the session and/or request with the objects necessary to display
* the page.
*
* @param mapping
* the struts action mapping
* @param form
* the web form
* @param request
* HTTPRequest
* @param response
* HTTPResponse
* @return
* @throws Exception
*/
public ActionForward populateTransientInterference(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response) throws Exception
{
// Get and clean method to prevent Cross-Site Scripting
String methodName = request.getParameter("unprotected_method");
log.debug("methodName: " + methodName);
if (!methodName.equals("populateTransientInterference")){
methodName = SafeHTMLUtil.clean(methodName);
log.debug("methodName: " + methodName);
}
setCancerModel(request);
setComments(request, Constants.Pages.TRANSIENT_INTERFERENCE);
return mapping.findForward("viewTransientInterference");
}
/**
* Populate the session and/or request with the objects necessary to display
* the page.
*
* @param mapping
* the struts action mapping
* @param form
* the web form
* @param request
* HTTPRequest
* @param response
* HTTPResponse
* @return
* @throws Exception
*/
public ActionForward populateImages(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response) throws Exception
{
// Get and clean method to prevent Cross-Site Scripting
String methodName = request.getParameter("unprotected_method");
log.debug("methodName: " + methodName);
if (!methodName.equals("populateImages")){
methodName = SafeHTMLUtil.clean(methodName);
log.debug("methodName: " + methodName);
}
setCancerModel(request);
setComments(request, Constants.Pages.IMAGES);
return mapping.findForward("viewImages");
}
/**
* Populate the session and/or request with the objects necessary to display
* the page.
*
* @param mapping
* the struts action mapping
* @param form
* the web form
* @param request
* HTTPRequest
* @param response
* HTTPResponse
* @return
* @throws Exception
*/
public ActionForward populateMicroarrays(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response) throws Exception
{
// Get and clean method to prevent Cross-Site Scripting
String methodName = request.getParameter("unprotected_method");
log.debug("methodName: " + methodName);
if (!methodName.equals("populateMicroarrays")){
methodName = SafeHTMLUtil.clean(methodName);
log.debug("methodName: " + methodName);
}
setCancerModel(request);
//Get external properties file
Properties camodProperties = new Properties();
String camodPropertiesFileName = null;
camodPropertiesFileName = System.getProperty("gov.nih.nci.camod.camodProperties");
try {
FileInputStream in = new FileInputStream(camodPropertiesFileName);
camodProperties.load(in);
}
catch (FileNotFoundException e) {
log.error("Caught exception finding file for properties: ", e);
e.printStackTrace();
} catch (IOException e) {
log.error("Caught exception finding file for properties: ", e);
e.printStackTrace();
}
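// Make the caArray URI fragments from the properties file available to the view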
request.setAttribute("uri_start", camodProperties.getProperty("caarray.uri_start"));
request.setAttribute("uri_end", camodProperties.getProperty("caarray.uri_end"));
setComments(request, Constants.Pages.MICROARRAY);
return mapping.findForward("viewMicroarrays");
}
/**
* Populate the session and/or request with the objects necessary to display
* the page.
*
* @param mapping
* the struts action mapping
* @param form
* the web form
* @param request
* HTTPRequest
* @param response
* HTTPResponse
* @return
* @throws Exception
*/
public ActionForward populateTransplantation(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response) throws Exception
{
log.debug("<populateTransplantation> Enter:");
// Get and clean method to prevent Cross-Site Scripting
String methodName = request.getParameter("unprotected_method");
log.debug("methodName: " + methodName);
if (!methodName.equals("populateTransplantation")){
methodName = SafeHTMLUtil.clean(methodName);
log.debug("methodName: " + methodName);
}
setCancerModel(request);
setComments(request, Constants.Pages.TRANSPLANTATION);
log.debug("<populateTransplantation> Exit:");
return mapping.findForward("viewTransplantation");
}
/**
* Populate the session and/or request with the objects necessary to display
* the page.
*
* @param mapping
* the struts action mapping
* @param form
* the web form
* @param request
* HTTPRequest
* @param response
* HTTPResponse
* @return
* @throws Exception
*/
public ActionForward populateTransplantationDetails(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response) throws Exception
{
log.debug("<populateTransplantationDetails> Enter:");
// Get and clean method to prevent Cross-Site Scripting
String methodName = request.getParameter("unprotected_method");
log.debug("methodName: " + methodName);
if (!methodName.equals("populateTransplantationDetails")){
methodName = SafeHTMLUtil.clean(methodName);
log.debug("methodName: " + methodName);
}
String modelID = request.getParameter("tModelID");
request.getSession().setAttribute(Constants.MODELID, modelID);
String nsc = request.getParameter("nsc");
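// An empty (but non-null) NSC number means there is nothing to look up, so fall back to the model characteristics view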
if (nsc != null && nsc.length() == 0)
return mapping.findForward("viewModelCharacteristics");
log.debug("<populateTransplantationDetails> modelID:" + modelID);
log.debug("<populateTransplantationDetails> nsc:" + nsc);
TransplantationManager mgr = (TransplantationManager) getBean("transplantationManager");
Transplantation t = mgr.get(modelID);
request.getSession().setAttribute(Constants.TRANSPLANTATIONMODEL, t);
request.getSession().setAttribute(Constants.NSC_NUMBER, nsc);
request.getSession().setAttribute(Constants.TRANSPLANTATIONRESULTLIST, t.getInvivoResultCollectionByNSC(nsc));
return mapping.findForward("viewInvivoDetails");
}
}
| software/camod/src/gov/nih/nci/camod/webapp/action/ViewModelAction.java | /**
* @author sguruswami
*
* $Id: ViewModelAction.java,v 1.69 2009-06-01 17:02:46 pandyas Exp $
*
* $Log: not supported by cvs2svn $
* Revision 1.68 2009/05/20 17:16:34 pandyas
* modified for gforge #17325 Upgrade caMOD to use caBIO 4.x and EVS 4.x to get data
*
* Revision 1.67 2009/03/25 16:24:58 pandyas
* modified for #17833 Make sure all references to Tranplantation are properly named
*
* Revision 1.66 2009/03/13 17:03:46 pandyas
* modified for #19205 Sort therapies in the order they are entered
*
* Revision 1.65 2008/08/14 17:07:03 pandyas
* remove debug line
*
* Revision 1.64 2008/08/14 17:01:42 pandyas
* modified debug line to use log
*
* Revision 1.63 2008/08/01 14:15:10 pandyas
* Modifed to prevent SQL inject - added HTTP Header clean
* App scan performed on July 30, 2008
*
* Revision 1.62 2008/07/28 17:19:02 pandyas
* Modifed to prevent SQL inject - added HTTP Header
* App scan performed on July 24, 2008
*
* Revision 1.61 2008/07/21 18:08:31 pandyas
* Modified to prevent SQL injection
* Scan performed on July 21, 2008
*
* Revision 1.60 2008/07/17 19:05:26 pandyas
* Modified to clean header to prevent SQL injection/Cross-Site Scripting
* Scan performed on July 16, 2008 by IRT
*
* Revision 1.59 2008/06/30 18:18:28 pandyas
* Removed code originally added for security scan when it caused null pointer errors
*
* Revision 1.58 2008/06/30 15:29:05 pandyas
* Modified to prevent Cross-Site Scripting
* Cleaned parameter name before proceeding
* Fixed code added in previous version
*
* Revision 1.57 2008/05/27 14:36:40 pandyas
* Modified to prevent SQL injection
* Cleaned HTTP Header before proceeding
* Re: Apps Scan run 05/23/2008
*
* Revision 1.56 2008/02/05 17:10:09 pandyas
* Removed debug statement for build to dev
*
* Revision 1.55 2008/02/05 17:09:34 pandyas
* Removed debug statement for build to dev
*
* Revision 1.54 2008/01/31 22:27:52 pandyas
* remove log printouts now that bug is resolved
*
* Revision 1.53 2008/01/31 22:23:22 pandyas
* remove log printouts now that bug is resolved
*
* Revision 1.52 2008/01/31 17:09:54 pandyas
* Modified to send new gene identifier (entrez gene id) to caBIO from new object location
*
* Revision 1.51 2008/01/28 18:45:18 pandyas
* Modified to debug caBIO data not returning to caMOD on dev
*
* Revision 1.50 2008/01/16 20:09:31 pandyas
* removed caBIO logging so the page renders when connection to caBIO fails
*
* Revision 1.49 2008/01/16 18:29:57 pandyas
* Renamed value to Transplant for #8290
*
* Revision 1.48 2008/01/10 15:55:01 pandyas
* modify output for final dev deployment
*
* Revision 1.47 2008/01/02 17:57:44 pandyas
* modified for #816 Connection to caELMIR - retrieve data for therapy search page
*
* Revision 1.46 2007/12/27 22:32:33 pandyas
* Modified for feature #8816 Connection to caELMIR - retrieve data for therapy search page
* Also added code to display Therapy link when only caELMIR data is available for a study
*
* Revision 1.45 2007/12/27 21:44:00 pandyas
* re-commit - changes did not show up in project
*
* Revision 1.44 2007/12/18 13:31:32 pandyas
* Added populate method for study data from caELMIRE for integration of Therapy study data
*
* Revision 1.43 2007/12/17 18:03:22 pandyas
* Removed * in searchFilter used for getting e-mail from LDAP
* Apps Support ticket was submitted (31169 - incorrect e-mail associated with my caMOD account) stating:
*
* Cheryl Marks submitted a ticket to NCICB Application Support in which she requested that the e-mail address associated with her account in the "User Settings" screen in caMOD be corrected. She has attempted to correct it herself, but because the program queries the LDAP Server for the e-mail address, her corrections were not retained.
*
* Revision 1.42 2007/12/04 13:49:19 pandyas
* Modified code for #8816 Connection to caELMIR - retrieve data for therapy search page
*
* Revision 1.41 2007/11/25 23:34:23 pandyas
* Initial version for feature #8816 Connection to caELMIR - retrieve data for therapy search page
*
* Revision 1.40 2007/10/31 18:39:30 pandyas
* Fixed #8188 Rename UnctrlVocab items to text entries
* Fixed #8290 Rename graft object into transplant object
*
* Revision 1.39 2007/09/14 18:53:37 pandyas
* Fixed Bug #8954: link to invivo detail page does not work
*
* Revision 1.38 2007/09/12 19:36:40 pandyas
* modified debug statements for build to stage tier
*
* Revision 1.37 2007/08/07 19:49:46 pandyas
* Removed reference to Transplant as per VCDE comments and after modification to object definition for CDE
*
* Revision 1.36 2007/08/07 18:26:20 pandyas
* Renamed to GRAFT as per VCDE comments
*
* Revision 1.35 2007/07/31 12:02:55 pandyas
* VCDE silver level and caMOD 2.3 changes
*
* Revision 1.34 2007/06/19 20:42:59 pandyas
* Users not logged in can not access the session property to check the model species. Therefore, we must show the attribute for all models.
*
* Revision 1.33 2007/06/19 18:39:21 pandyas
* Constant for species common name needs to be set for viewModelCharacteristics so it shows up for Zebrafish models
*
* Revision 1.32 2006/08/17 18:10:44 pandyas
* Defect# 410: Externalize properties files - Code changes to get properties
*
* Revision 1.31 2006/05/24 18:37:27 georgeda
* Workaround for bug in caBIO
*
* Revision 1.30 2006/05/09 18:57:54 georgeda
* Changes for searching on transient interfaces
*
* Revision 1.29 2006/05/08 13:43:15 georgeda
* Reformat and clean up warnings
*
* Revision 1.28 2006/04/19 19:31:58 georgeda
* Fixed display issue w/ GeneDelivery
*
* Revision 1.27 2006/04/19 18:50:01 georgeda
* Fixed issue w/ engineered genes displaying
*
* Revision 1.26 2006/04/17 19:09:41 pandyas
* caMod 2.1 OM changes
*
* Revision 1.25 2005/11/21 18:38:31 georgeda
* Defect #35. Trim whitespace from items that are freeform text
*
* Revision 1.24 2005/11/15 22:13:46 georgeda
* Cleanup of drug screening
*
* Revision 1.23 2005/11/14 14:21:44 georgeda
* Added sorting and spontaneous mutation
*
* Revision 1.22 2005/11/11 18:39:30 georgeda
* Removed unneeded call
*
* Revision 1.21 2005/11/10 22:07:36 georgeda
* Fixed part of bug #21
*
* Revision 1.20 2005/11/10 18:12:23 georgeda
* Use constant
*
* Revision 1.19 2005/11/07 13:57:39 georgeda
* Minor tweaks
*
* Revision 1.18 2005/11/03 15:47:11 georgeda
* Fixed slow invivo results
*
* Revision 1.17 2005/10/27 18:13:48 guruswas
* Show all publications in the publications display page.
*
* Revision 1.16 2005/10/20 21:35:37 georgeda
* Fixed xenograft display bug
*
* Revision 1.15 2005/10/19 18:56:00 guruswas
* implemented invivo details page
*
* Revision 1.14 2005/10/11 18:15:25 georgeda
* More comment changes
*
* Revision 1.13 2005/10/10 14:12:24 georgeda
* Changes for comment curation
*
* Revision 1.12 2005/10/07 21:15:03 georgeda
* Added caarray variables
*
* Revision 1.11 2005/10/06 13:37:01 georgeda
* Removed informational message
*
* Revision 1.10 2005/09/30 18:42:24 guruswas
* intial implementation of drug screening search and display page
*
* Revision 1.9 2005/09/22 21:34:51 guruswas
* First stab at carcinogenic intervention pages
*
* Revision 1.8 2005/09/22 15:23:41 georgeda
* Cleaned up warnings
*
* Revision 1.7 2005/09/21 21:02:24 guruswas
* Display the organ, disease names from NCI Thesaurus
*
* Revision 1.6 2005/09/21 20:47:16 georgeda
* Cleaned up
*
* Revision 1.5 2005/09/16 19:30:00 guruswas
* Display invivo data (from DTP) in the therapuetic approaches page
*
* Revision 1.4 2005/09/16 15:52:56 georgeda
* Changes due to manager re-write
*
*
*/
package gov.nih.nci.camod.webapp.action;
import edu.wustl.common.util.CaElmirInterfaceManager;
import gov.nih.nci.cabio.domain.Gene;
//import gov.nih.nci.cabio.domain.impl.GeneImpl;
import gov.nih.nci.camod.Constants;
import gov.nih.nci.camod.domain.Agent;
import gov.nih.nci.camod.domain.AnimalModel;
import gov.nih.nci.camod.domain.CaelmirStudyData;
import gov.nih.nci.camod.domain.CarcinogenExposure;
import gov.nih.nci.camod.domain.Comments;
import gov.nih.nci.camod.domain.EngineeredGene;
import gov.nih.nci.camod.domain.GeneIdentifier;
import gov.nih.nci.camod.domain.GenomicSegment;
import gov.nih.nci.camod.domain.Transplantation;
import gov.nih.nci.camod.domain.InducedMutation;
import gov.nih.nci.camod.domain.Person;
import gov.nih.nci.camod.domain.SpontaneousMutation;
import gov.nih.nci.camod.domain.TargetedModification;
import gov.nih.nci.camod.domain.Therapy;
import gov.nih.nci.camod.domain.Transgene;
import gov.nih.nci.camod.service.AgentManager;
import gov.nih.nci.camod.service.AnimalModelManager;
import gov.nih.nci.camod.service.CommentsManager;
import gov.nih.nci.camod.service.PersonManager;
import gov.nih.nci.camod.service.TransplantationManager;
import gov.nih.nci.camod.service.impl.QueryManagerSingleton;
import gov.nih.nci.camod.util.EvsTreeUtil;
import gov.nih.nci.camod.util.SafeHTMLUtil;
import gov.nih.nci.common.domain.DatabaseCrossReference;
//import gov.nih.nci.common.domain.impl.DatabaseCrossReferenceImpl;
import gov.nih.nci.system.applicationservice.ApplicationService;
import gov.nih.nci.system.applicationservice.CaBioApplicationService;
import gov.nih.nci.system.client.ApplicationServiceProvider;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.Vector;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
public class ViewModelAction extends BaseAction
{
/**
* sets the cancer model object in the session
*
* @param request
* the httpRequest
*/
private void setCancerModel(HttpServletRequest request)
{
String modelID = request.getParameter(Constants.Parameters.MODELID);
log.debug("<setCancerModel> modelID: " + modelID);
AnimalModelManager animalModelManager = (AnimalModelManager) getBean("animalModelManager");
AnimalModel am = null;
try
{
am = animalModelManager.get(modelID);
}
catch (Exception e)
{
log.error("Unable to get cancer model in setCancerModel");
e.printStackTrace();
}
request.getSession().setAttribute(Constants.ANIMALMODEL, am);
// Set model id to display on subViewModelMenu on left menu bar
request.getSession().setAttribute(Constants.MODELID, am.getId().toString());
}
/**
* sets the cancer model object in the session
*
* @param request
* the httpRequest
* @throws Exception
*/
private void setComments(HttpServletRequest request,
String inSection) throws Exception
{
String theCommentsId = request.getParameter(Constants.Parameters.COMMENTSID);
CommentsManager theCommentsManager = (CommentsManager) getBean("commentsManager");
log.debug("Comments id: " + theCommentsId);
List<Comments> theCommentsList = new ArrayList<Comments>();
if (theCommentsId != null && theCommentsId.length() > 0)
{
Comments theComments = theCommentsManager.get(theCommentsId);
if (theComments != null)
{
log.debug("Found a comment: " + theComments.getRemark());
theCommentsList.add(theComments);
}
}
// Get all comments that are either approved or owned by this user
else
{
PersonManager thePersonManager = (PersonManager) getBean("personManager");
Person theCurrentUser = thePersonManager.getByUsername((String) request.getSession().getAttribute(Constants.CURRENTUSER));
AnimalModel theAnimalModel = (AnimalModel) request.getSession().getAttribute(Constants.ANIMALMODEL);
theCommentsList = theCommentsManager.getAllBySection(inSection, theCurrentUser, theAnimalModel);
}
request.setAttribute(Constants.Parameters.COMMENTSLIST, theCommentsList);
}
/**
* @param mapping
* @param form
* @param request
* @param response
* @return
* @throws Exception
*/
public ActionForward populateModelCharacteristics(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response) throws Exception
{
request.getSession(true);
try {
// get and clean header to prevent SQL injection
String sID = null;
if (request.getHeader("X-Forwarded-For") != null){
sID = request.getHeader("X-Forwarded-For");
log.debug("cleaned X-Forwarded-For: " + sID);
sID = SafeHTMLUtil.clean(sID);
}
// get and clean header to prevent SQL injection
if (request.getHeader("Referer") != null){
sID = request.getHeader("Referer");
log.debug("cleaned Referer: " + sID);
sID = SafeHTMLUtil.clean(sID);
}
// Clean all headers for security scan (careful about what chars you allow)
String headername = "";
for(Enumeration e = request.getHeaderNames(); e.hasMoreElements();){
headername = (String)e.nextElement();
log.debug("populateModelCharacteristics headername: " + headername);
String cleanHeaders = SafeHTMLUtil.clean(headername);
log.debug("populateModelCharacteristics cleaned headername: " + headername);
}
// Get and clean method to prevent Cross-Site Scripting
String methodName = request.getParameter("unprotected_method");
log.info("methodName: " + methodName);
if (!methodName.equals("populateModelCharacteristics")){
methodName = SafeHTMLUtil.clean(methodName);
log.debug("cleaned methodName: " + methodName);
}
setCancerModel(request);
setComments(request, Constants.Pages.MODEL_CHARACTERISTICS);
// Call method so therapy link displays for models with caELMIR-only data
//caELMIR server went down and we experienced performance issues trying to connect
//populateCaelmirTherapyDetails(mapping, form, request, response);
}
catch (Exception e)
{
log.error("Error in populateModelCharacteristics", e);
}
return mapping.findForward("viewModelCharacteristics");
}
/**
*
*/
public ActionForward populateEngineeredGene(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response) throws Exception
{
log.debug("<populateEngineeredGene> modelID" + request.getParameter("aModelID"));
// Get and clean method to prevent Cross-Site Scripting
String methodName = request.getParameter("unprotected_method");
log.debug("methodName: " + methodName);
if (!methodName.equals("populateEngineeredGene")){
methodName = SafeHTMLUtil.clean(methodName);
log.debug("methodName: " + methodName);
}
String modelID = request.getParameter("aModelID");
AnimalModelManager animalModelManager = (AnimalModelManager) getBean("animalModelManager");
AnimalModel am = animalModelManager.get(modelID);
final Set egc = am.getEngineeredGeneCollection();
final int egcCnt = (egc != null) ? egc.size() : 0;
final List<EngineeredGene> tgc = new ArrayList<EngineeredGene>();
int tgCnt = 0;// Transgene
final List<EngineeredGene> gsc = new ArrayList<EngineeredGene>();
int gsCnt = 0;// GenomicSegment
final List<EngineeredGene> tmc = new ArrayList<EngineeredGene>();
int tmCnt = 0;// TargetedModification
final Map<Long, Gene> tmGeneMap = new HashMap<Long, Gene>();
final List<EngineeredGene> imc = new ArrayList<EngineeredGene>();
final List<SpontaneousMutation> smc = new ArrayList<SpontaneousMutation>(am.getSpontaneousMutationCollection());
Iterator it = egc.iterator();
int imCnt = 0;// InducedMutation
while (it.hasNext())
{
EngineeredGene eg = (EngineeredGene) it.next();
if (eg instanceof Transgene)
{
tgc.add(eg);
tgCnt++;
}
else if (eg instanceof GenomicSegment)
{
gsc.add(eg);
gsCnt++;
}
else if (eg instanceof TargetedModification)
{
tmc.add(eg);
tmCnt++;
// now go to caBIO and query the gene object....
TargetedModification tm = (TargetedModification) eg;
GeneIdentifier geneIdentifier = tm.getGeneIdentifier();
log.debug("geneIdentifier.getEntrezGeneID() " + geneIdentifier.getEntrezGeneID());
if (geneIdentifier != null)
{
log.debug("Connecting to caBIO to look up gene " + geneIdentifier);
// the geneId is available
try
{
CaBioApplicationService appService = (CaBioApplicationService)ApplicationServiceProvider.getApplicationService();
log.info("appService: " + appService.toString());
DatabaseCrossReference dcr = new DatabaseCrossReference();
dcr.setCrossReferenceId(geneIdentifier.getEntrezGeneID());
dcr.setType("gov.nih.nci.cabio.domain.Gene");
dcr.setDataSourceName("LOCUS_LINK_ID");
List<DatabaseCrossReference> cfcoll = new ArrayList<DatabaseCrossReference>();
cfcoll.add(dcr);
Gene myGene = new Gene();
myGene.setDatabaseCrossReferenceCollection(cfcoll);
List resultList = appService.search(Gene.class, myGene);
final int geneCount = (resultList != null) ? resultList.size() : 0;
//log.debug("Got " + geneCount + " Gene Objects");
if (geneCount > 0)
{
myGene = (Gene) resultList.get(0);
log.debug("Gene:" + geneIdentifier + " ==>" + myGene);
tmGeneMap.put(tm.getId(), myGene);
}
}
catch (Exception e)
{
log.error("Unable to get information from caBIO", e);
}
}
}
else if (eg instanceof InducedMutation)
{
imc.add(eg);
imCnt++;
}
}
log.debug("<populateEngineeredGene> " + "egcCnt=" + egcCnt + "tgc=" + tgCnt + "gsc=" + gsCnt + "tmc=" + tmCnt + "imc=" + imCnt);
request.getSession().setAttribute(Constants.ANIMALMODEL, am);
request.getSession().setAttribute(Constants.TRANSGENE_COLL, tgc);
request.getSession().setAttribute(Constants.GENOMIC_SEG_COLL, gsc);
request.getSession().setAttribute(Constants.TARGETED_MOD_COLL, tmc);
request.getSession().setAttribute(Constants.TARGETED_MOD_GENE_MAP, tmGeneMap);
request.getSession().setAttribute(Constants.INDUCED_MUT_COLL, imc);
request.getSession().setAttribute(Constants.SPONTANEOUS_MUT_COLL, smc);
log.debug("<populateEngineeredGene> set attributes done.");
setComments(request, Constants.Pages.GENETIC_DESCRIPTION);
return mapping.findForward("viewGeneticDescription");
}
/**
* Populate the session and/or request with the objects necessary to display
* the page.
*
* @param mapping
* the struts action mapping
* @param form
* the web form
* @param request
* HTTPRequest
* @param response
* HTTPResponse
* @return
* @throws Exception
*/
public ActionForward populateCarcinogenicInterventions(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response) throws Exception
{
// Get and clean method to prevent Cross-Site Scripting
String methodName = request.getParameter("unprotected_method");
log.debug("methodName: " + methodName);
if (!methodName.equals("populateCarcinogenicInterventions")){
methodName = SafeHTMLUtil.clean(methodName);
log.debug("methodName: " + methodName);
}
setCancerModel(request);
String modelID = request.getParameter(Constants.Parameters.MODELID);
AnimalModelManager animalModelManager = (AnimalModelManager) getBean("animalModelManager");
AnimalModel am = animalModelManager.get(modelID);
final Set ceColl = am.getCarcinogenExposureCollection();
Iterator it = ceColl.iterator();
final Map<String, List<Object>> interventionTypeMap = new HashMap<String, List<Object>>();
while (it.hasNext())
{
CarcinogenExposure ce = (CarcinogenExposure) it.next();
if (ce != null)
{
log.debug("Checking agent:" + ce.getEnvironmentalFactor().getNscNumber());
String theType = ce.getEnvironmentalFactor().getType();
if (theType == null || theType.length() == 0)
{
theType = ce.getEnvironmentalFactor().getTypeAlternEntry();
if (theType == null || theType.length() == 0)
{
theType = "Not specified";
}
}
List<Object> theTypeColl = interventionTypeMap.get(theType);
if (theTypeColl == null)
{
theTypeColl = new ArrayList<Object>();
interventionTypeMap.put(theType, theTypeColl);
}
theTypeColl.add(ce);
}
}
if (am.getGeneDeliveryCollection().size() > 0)
{
List<Object> theGeneDeliveryCollection = new ArrayList<Object>(am.getGeneDeliveryCollection());
interventionTypeMap.put("GeneDelivery", theGeneDeliveryCollection);
}
request.getSession().setAttribute(Constants.CARCINOGENIC_INTERVENTIONS_COLL, interventionTypeMap);
setComments(request, Constants.Pages.CARCINOGENIC_INTERVENTION);
return mapping.findForward("viewCarcinogenicInterventions");
}
/**
* Populate the session and/or request with the objects necessary to display
* the page.
*
* @param mapping
* the struts action mapping
* @param form
* the web form
* @param request
* HTTPRequest
* @param response
* HTTPResponse
* @return
* @throws Exception
*/
public ActionForward populatePublications(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response) throws Exception
{
// Get and clean method to prevent Cross-Site Scripting
String methodName = request.getParameter("unprotected_method");
log.debug("methodName: " + methodName);
if (!methodName.equals("populatePublications")){
methodName = SafeHTMLUtil.clean(methodName);
log.debug("methodName: " + methodName);
}
setCancerModel(request);
String modelID = request.getParameter("aModelID");
List pubs = null;
try
{
pubs = QueryManagerSingleton.instance().getAllPublications(Long.valueOf(modelID).longValue());
log.debug("pubs.size(): " + pubs.size());
}
catch (Exception e)
{
log.error("Unable to get publications");
e.printStackTrace();
}
request.getSession().setAttribute(Constants.PUBLICATIONS, pubs);
setComments(request, Constants.Pages.PUBLICATIONS);
return mapping.findForward("viewPublications");
}
/**
* Populate the session and/or request with the objects necessary to display
* the page.
*
* @param mapping
* the struts action mapping
* @param form
* the web form
* @param request
* HTTPRequest
* @param response
* HTTPResponse
* @return
* @throws Exception
*/
public ActionForward populateHistopathology(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response) throws Exception
{
// Get and clean method to prevent Cross-Site Scripting
String methodName = request.getParameter("unprotected_method");
log.debug("methodName: " + methodName);
if (!methodName.equals("populateHistopathology")){
methodName = SafeHTMLUtil.clean(methodName);
log.debug("methodName: " + methodName);
}
setCancerModel(request);
setComments(request, Constants.Pages.HISTOPATHOLOGY);
return mapping.findForward("viewHistopathology");
}
/**
* Populate the session and/or request with the objects necessary to display
* the page.
*
* @param mapping
* the struts action mapping
* @param form
* the web form
* @param request
* HTTPRequest
* @param response
* HTTPResponse
* @return
* @throws Exception
*/
public ActionForward populateTherapeuticApproaches(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response) throws Exception
{
log.info("<ViewModelAction> populateTherapeuticApproaches");
// Get and clean method to prevent Cross-Site Scripting
String methodName = request.getParameter("unprotected_method");
log.debug("methodName: " + methodName);
if (!methodName.equals("populateTherapeuticApproaches")){
methodName = SafeHTMLUtil.clean(methodName);
log.debug("methodName: " + methodName);
}
setCancerModel(request);
//
// query caBIO and load clinical protocols information
// store clinicalProtocol info in a hashmap keyed by NSC#
//
final HashMap<Long, Collection> clinProtocols = new HashMap<Long, Collection>();
final HashMap<Long, Collection> yeastResults = new HashMap<Long, Collection>();
final HashMap<Long, Collection> invivoResults = new HashMap<Long, Collection>();
final List<Therapy> therapeuticApprochesColl = new ArrayList<Therapy>();
String modelID = request.getParameter(Constants.Parameters.MODELID);
AnimalModelManager animalModelManager = (AnimalModelManager) getBean("animalModelManager");
AnimalModel am = animalModelManager.get(modelID);
final Set therapyColl = am.getTherapyCollection();
Iterator it = therapyColl.iterator();
final int cc = (therapyColl != null) ? therapyColl.size() : 0;
log.debug("Looking up clinical protocols for " + cc + " agents...");
while (it.hasNext())
{
Therapy t = (Therapy) it.next();
if (t != null)
{
therapeuticApprochesColl.add(t);
}
// Sort therapy in order entered as requested by user
Collections.sort(therapeuticApprochesColl);
log.debug("therapeuticApprochesColl: " + therapeuticApprochesColl.toString());
Agent a = t.getAgent();
AgentManager myAgentManager = (AgentManager) getBean("agentManager");
if (a != null)
{
Long nscNumber = a.getNscNumber();
if (nscNumber != null)
{
Collection protocols = myAgentManager.getClinicalProtocols(a);
clinProtocols.put(nscNumber, protocols);
// get the yeast data
List yeastStages = myAgentManager.getYeastResults(a, true);
if (yeastStages.size() > 0)
{
yeastResults.put(a.getId(), yeastStages);
}
// now get invivo/Transplantation data
List transplantationResults = QueryManagerSingleton.instance().getInvivoResults(a, true);
invivoResults.put(a.getId(), transplantationResults);
}
}
}
request.getSession().setAttribute(Constants.THERAPEUTIC_APPROACHES_COLL, therapeuticApprochesColl);
request.getSession().setAttribute(Constants.CLINICAL_PROTOCOLS, clinProtocols);
request.getSession().setAttribute(Constants.YEAST_DATA, yeastResults);
request.getSession().setAttribute(Constants.INVIVO_DATA, invivoResults);
setComments(request, Constants.Pages.THERAPEUTIC_APPROACHES);
//caELMIR server went down and we experienced performance issues trying to connect
//populateCaelmirTherapyDetails(mapping, form, request, response);
return mapping.findForward("viewTherapeuticApproaches");
}
/**
* Populate the session and/or request with the objects necessary to display
* the page.
*
* @param mapping
* the struts action mapping
* @param form
* the web form
* @param request
* HTTPRequest
* @param response
* HTTPResponse
* @return
* @throws Exception
*/
public ActionForward populateCaelmirTherapyDetails(ActionMapping mapping,
ActionForm form, HttpServletRequest request,
HttpServletResponse response) throws Exception {
log.debug("<ViewModelAction> populateCaelmirTherapyDetails Enter");
// Get and clean method to prevent Cross-Site Scripting
String methodName = request.getParameter("unprotected_method");
log.debug("methodName: " + methodName);
if (!methodName.equals("populateCaelmirTherapyDetails")){
methodName = SafeHTMLUtil.clean(methodName);
log.debug("methodName: " + methodName);
}
setCancerModel(request);
JSONArray jsonArray = new JSONArray();
JSONObject jobj = new JSONObject();
Vector h = new Vector();
ArrayList caelmirStudyData = new ArrayList();
String modelID = request.getParameter(Constants.Parameters.MODELID);
AnimalModelManager theAnimalModelManager = (AnimalModelManager) getBean("animalModelManager");
AnimalModel theAnimalModel = theAnimalModelManager.get(modelID);
try {
log.debug("<ViewModelAction> populateCaelmirTherapyDetails Enter try");
// Link to the interface provided by caElmir
URL url = new URL("http://chichen-itza.compmed.ucdavis.edu:8080/"
+ CaElmirInterfaceManager.getStudyInfoUrl());
// set your proxy server and port
//System.setProperty("proxyHost", "ptproxy.persistent.co.in");
//System.setProperty("proxyPort", "8080");
URLConnection urlConnection = url.openConnection();
//log.debug("populateCaelmirTherapyDetails open connection");
// Must be set to true so that we can write to the output stream; this
// allows data to be passed to the URL.
urlConnection.setDoOutput(true);
JSONObject jsonObj = new JSONObject();
// setting the model id.
jsonObj.put(CaElmirInterfaceManager.getModelIdParameter(), modelID);
PrintWriter out = new PrintWriter(urlConnection.getOutputStream());
out.write(jsonObj.toString());
out.flush();
out.close();
//log.debug("populateCaelmirTherapyDetails created JSONObject");
// start reading the response
BufferedReader bufferedReader = new BufferedReader(
new InputStreamReader(urlConnection.getInputStream()));
if (bufferedReader != null) {
String resultStr = (String) bufferedReader.readLine();
jsonArray = new JSONArray(resultStr);
String status = null;
status = ((JSONObject) jsonArray.get(0)).get(
CaElmirInterfaceManager.getStatusMessageKey())
.toString();
//log.debug("populateCaelmirTherapyDetails status: " + status);
// Important: first check the status
if (!CaElmirInterfaceManager.getSuccessKey().equals(status)) {
// prints the status
log.info(status);
}
CaelmirStudyData studyData = new CaelmirStudyData();
// start reading study data from index 1
for (int i = 1; i < jsonArray.length(); i++) {
jobj = (JSONObject) jsonArray.get(i);
studyData = new CaelmirStudyData();
studyData.setDescription(jobj.getString(CaElmirInterfaceManager.getStudyDesrciptionKey()));
studyData.setEmail(jobj.getString(CaElmirInterfaceManager.getEmailKey()));
studyData.setHypothesis(jobj.getString(CaElmirInterfaceManager.getStudyHypothesisKey()));
studyData.setInstitution(jobj.getString(CaElmirInterfaceManager.getInstitutionKey()));
studyData.setInvestigatorName(jobj.getString(CaElmirInterfaceManager.getPrimaryInvestigatorKey()));
studyData.setStudyName(jobj.getString(CaElmirInterfaceManager.getStudyName()));
studyData.setUrl(jobj.getString(CaElmirInterfaceManager.getStudyUrlKey()));
caelmirStudyData.add(studyData);
}
}
} catch (MalformedURLException me) {
log.debug("MalformedURLException: " + me);
} catch (IOException ioe) {
log.debug("IOException: " + ioe);
}
// Set collection so therapy link will display if caELMIR data is available
// Needed for models with caELMIR data but no caMOD data
theAnimalModel.setCaelmirStudyDataCollection(caelmirStudyData);
request.getSession().setAttribute(Constants.CAELMIR_STUDY_DATA,
caelmirStudyData);
return mapping.findForward("viewTherapeuticApproaches");
}
/**
* Populate the session and/or request with the objects necessary to display
* the page.
*
* @param mapping
* the struts action mapping
* @param form
* the web form
* @param request
* HTTPRequest
* @param response
* HTTPResponse
* @return
* @throws Exception
*/
public ActionForward populateCellLines(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response) throws Exception
{
// Get and clean method to prevent Cross-Site Scripting
String methodName = request.getParameter("unprotected_method");
log.debug("methodName: " + methodName);
if (!methodName.equals("populateCellLines")){
methodName = SafeHTMLUtil.clean(methodName);
log.debug("methodName: " + methodName);
}
setCancerModel(request);
setComments(request, Constants.Pages.CELL_LINES);
return mapping.findForward("viewCellLines");
}
/**
* Populate the session and/or request with the objects necessary to display
* the page.
*
* @param mapping
* the struts action mapping
* @param form
* the web form
* @param request
* HTTPRequest
* @param response
* HTTPResponse
* @return
* @throws Exception
*/
public ActionForward populateTransientInterference(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response) throws Exception
{
// Get and clean method to prevent Cross-Site Scripting
String methodName = request.getParameter("unprotected_method");
log.debug("methodName: " + methodName);
if (!methodName.equals("populateTransientInterference")){
methodName = SafeHTMLUtil.clean(methodName);
log.debug("methodName: " + methodName);
}
setCancerModel(request);
setComments(request, Constants.Pages.TRANSIENT_INTERFERENCE);
return mapping.findForward("viewTransientInterference");
}
/**
* Populate the session and/or request with the objects necessary to display
* the page.
*
* @param mapping
* the struts action mapping
* @param form
* the web form
* @param request
* HTTPRequest
* @param response
* HTTPResponse
* @return
* @throws Exception
*/
public ActionForward populateImages(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response) throws Exception
{
// Get and clean method to prevent Cross-Site Scripting
String methodName = request.getParameter("unprotected_method");
log.debug("methodName: " + methodName);
if (!methodName.equals("populateImages")){
methodName = SafeHTMLUtil.clean(methodName);
log.debug("methodName: " + methodName);
}
setCancerModel(request);
setComments(request, Constants.Pages.IMAGES);
return mapping.findForward("viewImages");
}
/**
* Populate the session and/or request with the objects necessary to display
* the page.
*
* @param mapping
* the struts action mapping
* @param form
* the web form
* @param request
* HTTPRequest
* @param response
* HTTPResponse
* @return
* @throws Exception
*/
public ActionForward populateMicroarrays(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response) throws Exception
{
// Get and clean method to prevent Cross-Site Scripting
String methodName = request.getParameter("unprotected_method");
log.debug("methodName: " + methodName);
if (!methodName.equals("populateMicroarrays")){
methodName = SafeHTMLUtil.clean(methodName);
log.debug("methodName: " + methodName);
}
setCancerModel(request);
//Get external properties file
Properties camodProperties = new Properties();
String camodPropertiesFileName = null;
camodPropertiesFileName = System.getProperty("gov.nih.nci.camod.camodProperties");
try {
FileInputStream in = new FileInputStream(camodPropertiesFileName);
camodProperties.load(in);
}
catch (FileNotFoundException e) {
log.error("Caught exception finding file for properties: ", e);
e.printStackTrace();
} catch (IOException e) {
log.error("Caught exception finding file for properties: ", e);
e.printStackTrace();
}
request.setAttribute("uri_start", camodProperties.getProperty("caarray.uri_start"));
request.setAttribute("uri_end", camodProperties.getProperty("caarray.uri_end"));
setComments(request, Constants.Pages.MICROARRAY);
return mapping.findForward("viewMicroarrays");
}
/**
* Populate the session and/or request with the objects necessary to display
* the page.
*
* @param mapping
* the struts action mapping
* @param form
* the web form
* @param request
* HTTPRequest
* @param response
* HTTPResponse
* @return
* @throws Exception
*/
public ActionForward populateTransplantation(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response) throws Exception
{
log.debug("<populateTransplantation> Enter:");
// Get and clean method to prevent Cross-Site Scripting
String methodName = request.getParameter("unprotected_method");
log.debug("methodName: " + methodName);
if (!methodName.equals("populateTransplantation")){
methodName = SafeHTMLUtil.clean(methodName);
log.debug("methodName: " + methodName);
}
setCancerModel(request);
setComments(request, Constants.Pages.TRANSPLANTATION);
log.debug("<populateTransplantation> Exit:");
return mapping.findForward("viewTransplantation");
}
/**
* Populate the session and/or request with the objects necessary to display
* the page.
*
* @param mapping
* the struts action mapping
* @param form
* the web form
* @param request
* HTTPRequest
* @param response
* HTTPResponse
* @return
* @throws Exception
*/
public ActionForward populateTransplantationDetails(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response) throws Exception
{
log.debug("<populateTransplantationDetails> Enter:");
// Get and clean method to prevent Cross-Site Scripting
String methodName = request.getParameter("unprotected_method");
log.debug("methodName: " + methodName);
if (!methodName.equals("populateTransplantationDetails")){
methodName = SafeHTMLUtil.clean(methodName);
log.debug("methodName: " + methodName);
}
String modelID = request.getParameter("tModelID");
request.getSession().setAttribute(Constants.MODELID, modelID);
String nsc = request.getParameter("nsc");
if (nsc != null && nsc.length() == 0)
return mapping.findForward("viewModelCharacteristics");
log.debug("<populateTransplantationDetails> modelID:" + modelID);
log.debug("<populateTransplantationDetails> nsc:" + nsc);
TransplantationManager mgr = (TransplantationManager) getBean("transplantationManager");
Transplantation t = mgr.get(modelID);
request.getSession().setAttribute(Constants.TRANSPLANTATIONMODEL, t);
request.getSession().setAttribute(Constants.NSC_NUMBER, nsc);
request.getSession().setAttribute(Constants.TRANSPLANTATIONRESULTLIST, t.getInvivoResultCollectionByNSC(nsc));
return mapping.findForward("viewInvivoDetails");
}
}
| modified for gforge #25918 Upgrade caMOD to use caBIO 4.x to get data
SVN-Revision: 5509
| software/camod/src/gov/nih/nci/camod/webapp/action/ViewModelAction.java | modified for gforge #25918 Upgrade caMOD to use caBIO 4.x to get data | <ide><path>oftware/camod/src/gov/nih/nci/camod/webapp/action/ViewModelAction.java
<ide> }
<ide> catch (Exception e)
<ide> {
<del> log.error("Unable to get information from caBIO", e);
<add> log.error("ViewModelAction Unable to get information from caBIO", e);
<ide> }
<ide>
<ide> } |